i386.c revision 51411
1/* Subroutines for insn-output.c for Intel X86.
2   Copyright (C) 1988, 92, 94-98, 1999 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING.  If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
20
21/* $FreeBSD: head/contrib/gcc/config/i386/i386.c 51411 1999-09-19 10:43:38Z obrien $ */
22
23#include <setjmp.h>
24#include "config.h"
25#include "system.h"
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "output.h"
34#include "insn-attr.h"
35#include "tree.h"
36#include "flags.h"
37#include "except.h"
38#include "function.h"
39#include "recog.h"
40#include "expr.h"
41#include "toplev.h"
42
#ifdef EXTRA_CONSTRAINT
/* If EXTRA_CONSTRAINT is defined, then the 'S'
   constraint in REG_CLASS_FROM_LETTER will no longer work, and various
   asm statements that need 'S' for class SIREG will break.  */
 error EXTRA_CONSTRAINT conflicts with S constraint letter
/* The previous line used to be #error, but some compilers barf
   even if the conditional was untrue.  */
#endif

/* Fallback stack-probe limit when the target configuration does not
   provide one; -1 disables the explicit limit check.  */
#ifndef CHECK_STACK_LIMIT
#define CHECK_STACK_LIMIT -1
#endif

/* Nonzero when the current function needs the PIC register (%ebx):
   we are compiling PIC and either the function references the PIC
   offset table / constant pool, or profiling is enabled (profiling
   stubs themselves need the PIC register).  */
#define PIC_REG_USED 					\
  (flag_pic && (current_function_uses_pic_offset_table	\
		|| current_function_uses_const_pool	\
		|| profile_flag || profile_block_flag))

/* Type of an operand for ix86_{binary,unary}_operator_ok */
enum reg_mem
{
  reg_p,	/* operand is a register */
  mem_p,	/* operand is a memory reference */
  imm_p		/* operand is an immediate constant */
};
68
/* Processor costs (relative to an add).  One table per supported CPU;
   override_options points ix86_cost at the table matching -mcpu=.  */

struct processor_costs i386_cost = {	/* 386 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  6,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  23					/* cost of a divide/mod */
};

struct processor_costs i486_cost = {	/* 486 specific costs */
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  2,					/* constant shift costs */
  12,					/* cost of starting a multiply */
  1,					/* cost of multiply per each bit set */
  40					/* cost of a divide/mod */
};

struct processor_costs pentium_cost = {
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  4,					/* variable shift costs */
  1,					/* constant shift costs */
  11,					/* cost of starting a multiply */
  0,					/* cost of multiply per each bit set */
  25					/* cost of a divide/mod */
};

struct processor_costs pentiumpro_cost = {
  1,					/* cost of an add instruction */
  1,					/* cost of a lea instruction */
  3,					/* variable shift costs */
  1,					/* constant shift costs */
  4,					/* cost of starting a multiply */
  0,					/* cost of multiply per each bit set */
  17					/* cost of a divide/mod */
};

/* Costs for the CPU being scheduled for; defaults to Pentium until
   override_options selects the table for the actual -mcpu= value.  */
struct processor_costs *ix86_cost = &pentium_cost;
111
/* A memory reference through the frame pointer, in the given mode.  */
#define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))

extern FILE *asm_out_file;
extern char *strcat ();

static void ix86_epilogue PROTO((int));
static void ix86_prologue PROTO((int));

/* Forward declarations for output routines defined later in this file.  */
char *singlemove_string ();
char *output_move_const_single ();
char *output_fp_cc0_set ();

/* Register name tables indexed by hard register number: 32-bit names,
   16-bit low-byte names, and 8-bit high-byte names respectively.  */
char *hi_reg_name[] = HI_REGISTER_NAMES;
char *qi_reg_name[] = QI_REGISTER_NAMES;
char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
127
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in i386.h. */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{
  /* ax, dx, cx, bx */
  AREG, DREG, CREG, BREG,
  /* si, di, bp, sp */
  SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
  /* FP registers */
  FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
  FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
  /* arg pointer */
  INDEX_REGS
};
143
/* Test and compare insns in i386.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *i386_compare_op0 = NULL_RTX;
struct rtx_def *i386_compare_op1 = NULL_RTX;
/* Generator functions for the branch/scc insn matching the pending
   compare; the second is used for (in)equality comparisons.  */
struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();

/* which cpu are we scheduling for */
enum processor_type ix86_cpu;

/* which instruction set architecture to use.  */
int ix86_arch;

/* Strings to hold which cpu and instruction set architecture  to use.  */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_arch_string;		/* for -march=<xxx> */

/* Register allocation order (-mreg-alloc=); a string of register
   letters, validated by override_options and consumed by
   order_regs_for_local_alloc.  */
char *i386_reg_alloc_order;
/* Which hard regs appear in i386_reg_alloc_order; filled in by
   override_options, read by order_regs_for_local_alloc.  */
static char regs_allocated[FIRST_PSEUDO_REGISTER];

/* # of registers to use to pass arguments. */
char *i386_regparm_string;

/* i386_regparm_string as a number */
int i386_regparm;

/* Alignment to use for loops and jumps:  */

/* Power of two alignment for loops. */
char *i386_align_loops_string;

/* Power of two alignment for non-loop jumps. */
char *i386_align_jumps_string;

/* Values 1-5: see jump.c */
int i386_branch_cost;
char *i386_branch_cost_string;

/* Power of two alignment for functions. */
int i386_align_funcs;
char *i386_align_funcs_string;

/* Power of two alignment for loops. */
int i386_align_loops;

/* Power of two alignment for non-loop jumps. */
int i386_align_jumps;
192
193/* Sometimes certain combinations of command options do not make
194   sense on a particular target machine.  You can define a macro
195   `OVERRIDE_OPTIONS' to take account of this.  This macro, if
196   defined, is executed once just after all the command options have
197   been parsed.
198
199   Don't use this macro to turn on various extra optimizations for
200   `-O'.  That is what `OPTIMIZATION_OPTIONS' is for.  */
201
/* Validate and post-process all i386-specific command-line options.
   Called once after option parsing (via the OVERRIDE_OPTIONS macro).
   Resolves -march=/-mcpu= against the processor table, validates
   -mreg-alloc=, -mregparm=, the -malign-* family and -mbranch-cost=,
   supplying defaults where the user gave nothing.  Errors in the
   arch/cpu strings are recoverable (fall back to defaults); errors in
   the numeric options are fatal.  */

void
override_options ()
{
  int ch, i, j;
  int def_align;

  static struct ptt
    {
      char *name;		/* Canonical processor name.  */
      enum processor_type processor; /* Processor type enum value.  */
      struct processor_costs *cost; /* Processor costs */
      int target_enable;	/* Target flags to enable.  */
      int target_disable;	/* Target flags to disable.  */
    } processor_target_table[]
      = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
	   {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
	   {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
	   {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
	   {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost,
	      0, 0},
	   {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO,
	      &pentiumpro_cost, 0, 0}};

  int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Validate registers in register allocation order.  Each letter names
     one hard register; duplicates and unknown letters are fatal.  The
     regs_allocated[] marks set here are reused later by
     order_regs_for_local_alloc.  */
  if (i386_reg_alloc_order)
    {
      for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
	{
	  int regno = 0;

	  switch (ch)
	    {
	    case 'a':	regno = 0;	break;
	    case 'd':	regno = 1;	break;
	    case 'c':	regno = 2;	break;
	    case 'b':	regno = 3;	break;
	    case 'S':	regno = 4;	break;
	    case 'D':	regno = 5;	break;
	    case 'B':	regno = 6;	break;

	    default:	fatal ("Register '%c' is unknown", ch);
	    }

	  if (regs_allocated[regno])
	    fatal ("Register '%c' already specified in allocation order", ch);

	  regs_allocated[regno] = 1;
	}
    }

  /* No -march= given: default the architecture to Pentium, and the
     scheduling CPU to the configured default if it wasn't given either.  */
  if (ix86_arch_string == 0)
    {
      ix86_arch_string = PROCESSOR_PENTIUM_STRING;
      if (ix86_cpu_string == 0)
	ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
    }

  /* Look up -march= in the table; if -mcpu= was not given, schedule
     for the same processor.  NOTE: the loop index I is still used
     below to compare table positions of arch vs. cpu.  */
  for (i = 0; i < ptt_size; i++)
    if (! strcmp (ix86_arch_string, processor_target_table[i].name))
      {
	ix86_arch = processor_target_table[i].processor;
	if (ix86_cpu_string == 0)
	  ix86_cpu_string = processor_target_table[i].name;
	break;
      }

  if (i == ptt_size)
    {
      error ("bad value (%s) for -march= switch", ix86_arch_string);
      ix86_arch_string = PROCESSOR_PENTIUM_STRING;
      ix86_arch = PROCESSOR_DEFAULT;
    }

  if (ix86_cpu_string == 0)
    ix86_cpu_string = PROCESSOR_DEFAULT_STRING;

  /* Look up -mcpu=; select cost table and per-processor target flags.
     Scheduling for a CPU that precedes the arch in the table (i > j)
     is rejected when the arch is PentiumPro or newer.  */
  for (j = 0; j < ptt_size; j++)
    if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
      {
	ix86_cpu = processor_target_table[j].processor;
	ix86_cost = processor_target_table[j].cost;
	if (i > j && (int) ix86_arch >= (int) PROCESSOR_PENTIUMPRO)
	  error ("-mcpu=%s does not support -march=%s",
		 ix86_cpu_string, ix86_arch_string);

	target_flags |= processor_target_table[j].target_enable;
	target_flags &= ~processor_target_table[j].target_disable;
	break;
      }

  if (j == ptt_size)
    {
      error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
      ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
      ix86_cpu = PROCESSOR_DEFAULT;
    }

  /* Validate -mregparm= value. */
  if (i386_regparm_string)
    {
      i386_regparm = atoi (i386_regparm_string);
      if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
	fatal ("-mregparm=%d is not between 0 and %d",
	       i386_regparm, REGPARM_MAX);
    }

  /* The 486 suffers more from non-aligned cache line fills, and the
     larger code size results in a larger cache foot-print and more misses.
     The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
     cache line.  */
  def_align = (TARGET_486) ? 4 : 2;

  /* Validate -malign-loops= value, or provide default.  With
     ASM_OUTPUT_MAX_SKIP_ALIGN the assembler can bound the padding, so a
     larger default (16 bytes) is affordable.  */
  if (i386_align_loops_string)
    {
      i386_align_loops = atoi (i386_align_loops_string);
      if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
	fatal ("-malign-loops=%d is not between 0 and %d",
	       i386_align_loops, MAX_CODE_ALIGN);
    }
  else
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
    i386_align_loops = 4;
#else
    i386_align_loops = 2;
#endif

  /* Validate -malign-jumps= value, or provide default.  */
  if (i386_align_jumps_string)
    {
      i386_align_jumps = atoi (i386_align_jumps_string);
      if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
	fatal ("-malign-jumps=%d is not between 0 and %d",
	       i386_align_jumps, MAX_CODE_ALIGN);
    }
  else
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
    i386_align_jumps = 4;
#else
    i386_align_jumps = def_align;
#endif

  /* Validate -malign-functions= value, or provide default. */
  if (i386_align_funcs_string)
    {
      i386_align_funcs = atoi (i386_align_funcs_string);
      if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
	fatal ("-malign-functions=%d is not between 0 and %d",
	       i386_align_funcs, MAX_CODE_ALIGN);
    }
  else
    i386_align_funcs = def_align;

  /* Validate -mbranch-cost= value, or provide default. */
  if (i386_branch_cost_string)
    {
      i386_branch_cost = atoi (i386_branch_cost_string);
      if (i386_branch_cost < 0 || i386_branch_cost > 5)
	fatal ("-mbranch-cost=%d is not between 0 and 5",
	       i386_branch_cost);
    }
  else
    i386_branch_cost = 1;

  /* Keep nonleaf frame pointers.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;
}
376
377/* A C statement (sans semicolon) to choose the order in which to
378   allocate hard registers for pseudo-registers local to a basic
379   block.
380
381   Store the desired register order in the array `reg_alloc_order'.
382   Element 0 should be the register to allocate first; element 1, the
383   next register; and so on.
384
385   The macro body should not assume anything about the contents of
386   `reg_alloc_order' before execution of the macro.
387
388   On most machines, it is not necessary to define this macro.  */
389
/* Fill reg_alloc_order[] for local register allocation (implements the
   ORDER_REGS_FOR_LOCAL_ALLOC macro).  If the user gave
   -mreg-alloc=<letters>, their registers come first, in the order given,
   followed by every register not mentioned; otherwise the natural
   0..FIRST_PSEUDO_REGISTER-1 order is used.  */

void
order_regs_for_local_alloc ()
{
  int i, ch, order;

  /* User specified the register allocation order.  */

  if (i386_reg_alloc_order)
    {
      for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
	{
	  int regno = 0;

	  /* No default case needed: override_options already rejected
	     (fatal error) any letter not listed here.  */
	  switch (ch)
	    {
	    case 'a':	regno = 0;	break;
	    case 'd':	regno = 1;	break;
	    case 'c':	regno = 2;	break;
	    case 'b':	regno = 3;	break;
	    case 'S':	regno = 4;	break;
	    case 'D':	regno = 5;	break;
	    case 'B':	regno = 6;	break;
	    }

	  reg_alloc_order[order++] = regno;
	}

      /* Append every register the user did not mention, relying on the
	 regs_allocated[] marks set by override_options.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
	  if (! regs_allocated[i])
	    reg_alloc_order[order++] = i;
	}
    }

  /* If user did not specify a register allocation order, use natural order. */
  else
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	reg_alloc_order[i] = i;
    }
}
431
432void
433optimization_options (level, size)
434     int level;
435     int size ATTRIBUTE_UNUSED;
436{
437  /* For -O2 and beyond, turn off -fschedule-insns by default.  It tends to
438     make the problem with not enough registers even worse.  */
439#ifdef INSN_SCHEDULING
440  if (level > 1)
441    flag_schedule_insns = 0;
442#endif
443}
444
445/* Sign-extend a 16-bit constant */
446
447struct rtx_def *
448i386_sext16_if_const (op)
449     struct rtx_def *op;
450{
451  if (GET_CODE (op) == CONST_INT)
452    {
453      HOST_WIDE_INT val = INTVAL (op);
454      HOST_WIDE_INT sext_val;
455      if (val & 0x8000)
456	sext_val = val | ~0xffff;
457      else
458	sext_val = val & 0xffff;
459      if (sext_val != val)
460	op = GEN_INT (sext_val);
461    }
462  return op;
463}
464
465/* Return nonzero if the rtx is aligned */
466
467static int
468i386_aligned_reg_p (regno)
469     int regno;
470{
471  return (regno == STACK_POINTER_REGNUM
472	  || (! flag_omit_frame_pointer && regno == FRAME_POINTER_REGNUM));
473}
474
/* Return nonzero if operand OP can be assumed word-aligned.
   Registers and immediates always are; a MEM is aligned when its
   address is a 4-byte-multiple constant, an aligned base register,
   or such a register plus a 4-byte-multiple offset.  */

int
i386_aligned_p (op)
     rtx op;
{
  /* Registers and immediate operands are always "aligned". */
  if (GET_CODE (op) != MEM)
    return 1;

  /* Don't even try to do any aligned optimizations with volatiles. */
  if (MEM_VOLATILE_P (op))
    return 0;

  /* Get address of memory operand. */
  op = XEXP (op, 0);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* Absolute address: aligned iff a multiple of 4.  */
      if (INTVAL (op) & 3)
	break;
      return 1;

      /* Match "reg + offset" */
    case PLUS:
      if (GET_CODE (XEXP (op, 1)) != CONST_INT)
	break;
      if (INTVAL (XEXP (op, 1)) & 3)
	break;

      op = XEXP (op, 0);
      if (GET_CODE (op) != REG)
	break;

      /* ... fall through ... */

    case REG:
      return i386_aligned_reg_p (REGNO (op));

    default:
      break;
    }

  return 0;
}
519
520/* Return nonzero if INSN looks like it won't compute useful cc bits
521   as a side effect.  This information is only a hint. */
522
523int
524i386_cc_probably_useless_p (insn)
525     rtx insn;
526{
527  return ! next_cc0_user (insn);
528}
529
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for DECL.  The attributes in ATTRIBUTES have previously been
   assigned to DECL.

   This target defines no declaration attributes, so always return 0
   (attribute not recognized).  Type attributes are handled separately
   by i386_valid_type_attribute_p.  */

int
i386_valid_decl_attribute_p (decl, attributes, identifier, args)
     tree decl ATTRIBUTE_UNUSED;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier ATTRIBUTE_UNUSED;
     tree args ATTRIBUTE_UNUSED;
{
  return 0;
}
543
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.

   Recognizes `stdcall' and `cdecl' (no arguments allowed) and
   `regparm' (exactly one integer argument in [0, REGPARM_MAX]).  */

int
i386_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  /* These attributes only apply to function-ish types (and, for
     convenience, to FIELD_DECL/TYPE_DECL nodes passed here).  */
  if (TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Stdcall attribute says callee is responsible for popping arguments
     if they are not variable.  */
  if (is_attribute_p ("stdcall", identifier))
    return (args == NULL_TREE);

  /* Cdecl attribute says the callee is a normal C declaration. */
  if (is_attribute_p ("cdecl", identifier))
    return (args == NULL_TREE);

  /* Regparm attribute specifies how many integer arguments are to be
     passed in registers. */
  if (is_attribute_p ("regparm", identifier))
    {
      tree cst;

      /* Require exactly one argument.  */
      if (! args || TREE_CODE (args) != TREE_LIST
	  || TREE_CHAIN (args) != NULL_TREE
	  || TREE_VALUE (args) == NULL_TREE)
	return 0;

      cst = TREE_VALUE (args);
      if (TREE_CODE (cst) != INTEGER_CST)
	return 0;

      /* The count must fit in [0, REGPARM_MAX].  */
      if (TREE_INT_CST_HIGH (cst) != 0
	  || TREE_INT_CST_LOW (cst) < 0
	  || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
	return 0;

      return 1;
    }

  return 0;
}
595
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).

   This target treats all attribute combinations as compatible.  */

int
i386_comp_type_attributes (type1, type2)
     tree type1 ATTRIBUTE_UNUSED;
     tree type2 ATTRIBUTE_UNUSED;
{
  return 1;
}
607

/* Value is the number of bytes of arguments automatically
   popped when returning from a subroutine call.
   FUNDECL is the declaration node of the function (as a tree),
   FUNTYPE is the data type of the function (as a tree),
   or for a library call it is an identifier node for the subroutine name.
   SIZE is the number of bytes of arguments passed on the stack.

   On the 80386, the RTD insn may be used to pop them if the number
     of args is fixed, but if the number is variable then the caller
     must pop them all.  RTD can't be used for library calls now
     because the library is compiled with the Unix compiler.
   Use of RTD is a selectable option, since it is incompatible with
   standard Unix calling sequences.  If the option is not selected,
   the caller must always pop the args.

   The attribute stdcall is equivalent to RTD on a per module basis.  */

int
i386_return_pops_args (fundecl, funtype, size)
     tree fundecl;
     tree funtype;
     int size;
{
  /* -mrtd applies, except to library calls (identifier-node FUNDECL).  */
  int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);

    /* Cdecl functions override -mrtd, and never pop the stack. */
  if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {

    /* Stdcall functions will pop the stack if not variable args. */
    if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
      rtd = 1;

    /* The callee pops everything only when the argument list is fixed:
       either unprototyped (NULL arg types) or terminated by void.  */
    if (rtd
        && (TYPE_ARG_TYPES (funtype) == NULL_TREE
	    || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
		== void_type_node)))
      return size;
  }

  /* Lose any fake structure return argument.  */
  if (aggregate_value_p (TREE_TYPE (funtype)))
    return GET_MODE_SIZE (Pmode);

    return 0;
}
654

/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   Determines how many integer registers are available for passing
   arguments: i386_regparm by default, overridden by a `regparm'
   attribute on FNTYPE, and forced to 0 for varargs functions.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* Argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum;
  tree param, next_param;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args (");
      if (fntype)
	fprintf (stderr, "fntype code = %s, ret code = %s",
		 tree_code_name[(int) TREE_CODE (fntype)],
		 tree_code_name[(int) TREE_CODE (TREE_TYPE (fntype))]);
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  /* Start from all-zero cumulative state.  */
  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */
  cum->nregs = i386_regparm;
  if (fntype)
    {
      tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));

      if (attr)
	cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
    }

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_mode' if there
     are no variable arguments.  If there are variable arguments, then
     we won't pass anything in registers */

  if (cum->nregs)
    {
      for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
	   param != 0; param = next_param)
	{
	  next_param = TREE_CHAIN (param);
	  if (next_param == 0 && TREE_VALUE (param) != void_type_node)
	    cum->nregs = 0;
	}
    }

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, ", nregs=%d )\n", cum->nregs);

  return;
}
718
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)

   Consumes WORDS argument registers; once the register budget is
   exhausted, both the remaining-register count and the next register
   number are clamped to 0.  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* whether or not the argument was named */
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* NOTE(review): the "sz=" field actually prints WORDS, not BYTES —
     presumably intentional for this debug trace, but worth confirming.  */
  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
	     words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

  cum->words += words;
  cum->nregs -= words;
  cum->regno += words;

  /* Out of argument registers: everything further goes on the stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = 0;
    }

  return;
}
751
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
{
  rtx ret   = NULL_RTX;		/* NULL_RTX means "pass on the stack" */
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  switch (mode)
    {
      /* For now, pass fp/complex values on the stack. */
    default:
      break;

    case BLKmode:
    case DImode:
    case SImode:
    case HImode:
    case QImode:
      /* Use a register only if the whole argument fits in the
	 registers that remain.  */
      if (words <= cum->nregs)
	ret = gen_rtx_REG (mode, cum->regno);
      break;
    }

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr,
	       "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
	       words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);

      if (ret)
	fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
      else
	fprintf (stderr, ", stack");

      fprintf (stderr, " )\n");
    }

  return ret;
}
809
/* For an arg passed partly in registers and partly in memory,
   this is the number of registers used.
   For args passed entirely in registers or entirely in memory, zero.

   On this target an argument is never split between registers and
   memory (see function_arg), so the answer is always 0.  */

int
function_arg_partial_nregs (cum, mode, type, named)
     CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;	/* current arg information */
     enum machine_mode mode ATTRIBUTE_UNUSED;	/* current arg mode */
     tree type ATTRIBUTE_UNUSED;		/* type of the argument or 0 if lib support */
     int named ATTRIBUTE_UNUSED;		/* != 0 for normal args, == 0 for ... args */
{
  return 0;
}
823
/* Output an insn whose source is a 386 integer register.  SRC is the
   rtx for the register, and TEMPLATE is the op-code template.  SRC may
   be either SImode or DImode.

   The template will be output with operands[0] as SRC, and operands[1]
   as a pointer to the top of the 386 stack.  So a call from floatsidf2
   would look like this:

      output_op_from_reg (operands[1], AS1 (fild%z0,%1));

   where %z0 corresponds to the caller's operands[1], and is used to
   emit the proper size suffix.

   The strategy: push SRC onto the stack (high words first for multi-
   word values), run TEMPLATE against the stack top, then restore %esp.

   ??? Extend this to handle HImode - a 387 can load and store HImode
   values directly. */

void
output_op_from_reg (src, template)
     rtx src;
     char *template;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (src));

  xops[0] = src;			/* the register operand */
  xops[1] = AT_SP (Pmode);		/* memory at the stack top */
  xops[2] = GEN_INT (size);		/* bytes to pop afterwards */
  xops[3] = stack_pointer_rtx;

  /* Push the upper words first so the value sits little-endian in
     memory, word REGNO+k at (%esp)+4k.  */
  if (size > UNITS_PER_WORD)
    {
      rtx high;

      if (size > 2 * UNITS_PER_WORD)
	{
	  high = gen_rtx_REG (SImode, REGNO (src) + 2);
	  output_asm_insn (AS1 (push%L0,%0), &high);
	}

      high = gen_rtx_REG (SImode, REGNO (src) + 1);
      output_asm_insn (AS1 (push%L0,%0), &high);
    }

  output_asm_insn (AS1 (push%L0,%0), &src);
  output_asm_insn (template, xops);
  /* Pop the temporary back off the stack.  */
  output_asm_insn (AS2 (add%L3,%2,%3), xops);
}
871
/* Output an insn to pop an value from the 387 top-of-stack to 386
   register DEST. The 387 register stack is popped if DIES is true.  If
   the mode of DEST is an integer mode, a `fist' integer store is done,
   otherwise a `fst' float store is done.

   SCRATCH_MEM, if nonzero, is a memory slot to store through; otherwise
   stack space is reserved below %esp and the value popped into DEST.  */

void
output_to_reg (dest, dies, scratch_mem)
     rtx dest;
     int dies;
     rtx scratch_mem;
{
  rtx xops[4];
  int size = GET_MODE_SIZE (GET_MODE (dest));

  if (! scratch_mem)
    xops[0] = AT_SP (Pmode);	/* store through the stack top... */
  else
    xops[0] = scratch_mem;	/* ...or through the caller's slot */

  xops[1] = stack_pointer_rtx;
  xops[2] = GEN_INT (size);
  xops[3] = dest;

  /* Reserve stack space to receive the value.  */
  if (! scratch_mem)
    output_asm_insn (AS2 (sub%L1,%2,%1), xops);

  if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
    {
      if (dies)
	output_asm_insn (AS1 (fistp%z3,%y0), xops);
      else if (GET_MODE (xops[3]) == DImode && ! dies)
	{
	  /* There is no DImode version of this without a stack pop, so
	     we must emulate it.  It doesn't matter much what the second
	     instruction is, because the value being pushed on the FP stack
	     is not used except for the following stack popping store.
	     This case can only happen without optimization, so it doesn't
	     matter that it is inefficient.  */
	  output_asm_insn (AS1 (fistp%z3,%0), xops);
	  output_asm_insn (AS1 (fild%z3,%0), xops);
	}
      else
	output_asm_insn (AS1 (fist%z3,%y0), xops);
    }

  else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
    {
      if (dies)
	output_asm_insn (AS1 (fstp%z3,%y0), xops);
      else
	{
	  if (GET_MODE (dest) == XFmode)
	    {
	      /* There is no non-popping XFmode store; store-with-pop
		 then reload to leave the 387 stack unchanged.  */
	      output_asm_insn (AS1 (fstp%z3,%y0), xops);
	      output_asm_insn (AS1 (fld%z3,%y0), xops);
	    }
	  else
	    output_asm_insn (AS1 (fst%z3,%y0), xops);
	}
    }

  else
    abort ();

  /* Move the low word into DEST: pop it off the stack, or copy it
     out of the scratch slot.  */
  if (! scratch_mem)
    output_asm_insn (AS1 (pop%L0,%0), &dest);
  else
    output_asm_insn (AS2 (mov%L0,%0,%3), xops);


  /* Multi-word values continue into the following hard registers.  */
  if (size > UNITS_PER_WORD)
    {
      dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
      if (! scratch_mem)
	output_asm_insn (AS1 (pop%L0,%0), &dest);
      else
	{
	  xops[0] = adj_offsettable_operand (xops[0], 4);
	  xops[3] = dest;
	  output_asm_insn (AS2 (mov%L0,%0,%3), xops);
	}

      if (size > 2 * UNITS_PER_WORD)
	{
	  dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
	  if (! scratch_mem)
	    output_asm_insn (AS1 (pop%L0,%0), &dest);
	  else
	    {
	      xops[0] = adj_offsettable_operand (xops[0], 4);
	      output_asm_insn (AS2 (mov%L0,%0,%3), xops);
	    }
	}
    }
}
967
/* Return the assembler template for a single-word move from
   operands[1] to operands[0]: a push for a pre-decrement stack store,
   a constant-load sequence for CONST_DOUBLE, a plain mov when either
   side is a register or the source is constant, and a push/pop pair
   for the memory-to-memory case (which has no direct mov).  */

char *
singlemove_string (operands)
     rtx *operands;
{
  rtx x;
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
    {
      /* Pre-decrement is only supported on the stack pointer.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
	abort ();
      return "push%L1 %1";
    }
  else if (GET_CODE (operands[1]) == CONST_DOUBLE)
    return output_move_const_single (operands);
  else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
    return AS2 (mov%L0,%1,%0);
  else if (CONSTANT_P (operands[1]))
    return AS2 (mov%L0,%1,%0);
  else
    {
      /* Memory to memory: go through the stack.  */
      output_asm_insn ("push%L1 %1", operands);
      return "pop%L0 %0";
    }
}
992
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.
   Aborts if ADDR contains no such register (e.g. a scaled index
   or a purely constant address).  */

static rtx
find_addr_reg (addr)
     rtx addr;
{
  /* Descend through PLUS nodes toward the register term.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	abort ();
    }

  if (GET_CODE (addr) == REG)
    return addr;
  abort ();
}
1018
/* Output an insn to add the constant N to the register X.
   Uses inc/dec for +/-1; emits nothing for N == 0.  */

static void
asm_add (n, x)
     int n;
     rtx x;
{
  rtx xops[2];
  xops[0] = x;

  if (n == -1)
    output_asm_insn (AS1 (dec%L0,%0), xops);
  else if (n == 1)
    output_asm_insn (AS1 (inc%L0,%0), xops);
  else if (n < 0 || n == 128)
    {
      /* Subtract -N instead of adding N.  128 is included because
	 -128 fits in a sign-extended 8-bit immediate while +128
	 does not, giving a shorter encoding.  */
      xops[1] = GEN_INT (-n);
      output_asm_insn (AS2 (sub%L0,%1,%0), xops);
    }
  else if (n > 0)
    {
      xops[1] = GEN_INT (n);
      output_asm_insn (AS2 (add%L0,%1,%0), xops);
    }
}
1044
1045/* Output assembler code to perform a doubleword move insn
1046   with operands OPERANDS.  */
1047
/* Emit a multi-word move: two SImode words for an 8-byte mode, three
   for a 12-byte (long double) mode.  The hard part is ordering the
   word moves so that no half of the destination is clobbered while it
   is still needed as part of the source address, and keeping
   autoincrement/autodecrement addresses consistent.  Returns the
   template for the final word move (possibly "").  */
char *
output_move_double (operands)
     rtx *operands;
{
  /* Operand classification: register, offsettable memory, other
     memory, push (pre-dec), pop (post-inc), constant, invalid.  */
  enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
  rtx latehalf[2];
  rtx middlehalf[2];
  rtx xops[2];
  rtx addreg0 = 0, addreg1 = 0;	/* regs to bump for unoffsettable MEMs */
  int dest_overlapped_low = 0;
  int size = GET_MODE_SIZE (GET_MODE (operands[0]));

  middlehalf[0] = 0;
  middlehalf[1] = 0;

  /* First classify both operands.  */

  if (REG_P (operands[0]))
    optype0 = REGOP;
  else if (offsettable_memref_p (operands[0]))
    optype0 = OFFSOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
    optype0 = POPOP;
  else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
    optype0 = PUSHOP;
  else if (GET_CODE (operands[0]) == MEM)
    optype0 = MEMOP;
  else
    optype0 = RNDOP;

  if (REG_P (operands[1]))
    optype1 = REGOP;
  else if (CONSTANT_P (operands[1]))
    optype1 = CNSTOP;
  else if (offsettable_memref_p (operands[1]))
    optype1 = OFFSOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
    optype1 = POPOP;
  else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
    optype1 = PUSHOP;
  else if (GET_CODE (operands[1]) == MEM)
    optype1 = MEMOP;
  else
    optype1 = RNDOP;

  /* Check for the cases that the operand constraints are not
     supposed to allow to happen.  Abort if we get one,
     because generating code for these cases is painful.  */

  if (optype0 == RNDOP || optype1 == RNDOP)
    abort ();

  /* If one operand is decrementing and one is incrementing
     decrement the former register explicitly
     and change that operand into ordinary indexing.  */

  if (optype0 == PUSHOP && optype1 == POPOP)
    {
      /* ??? Can this ever happen on i386? */
      operands[0] = XEXP (XEXP (operands[0], 0), 0);
      asm_add (-size, operands[0]);
      /* NOTE(review): this tests the mode of operands[1] but the DFmode
	 branch below tests operands[0] -- looks inconsistent; confirm
	 against upstream before changing.  */
      if (GET_MODE (operands[1]) == XFmode)
        operands[0] = gen_rtx_MEM (XFmode, operands[0]);
      else if (GET_MODE (operands[0]) == DFmode)
        operands[0] = gen_rtx_MEM (DFmode, operands[0]);
      else
        operands[0] = gen_rtx_MEM (DImode, operands[0]);
      optype0 = OFFSOP;
    }

  if (optype0 == POPOP && optype1 == PUSHOP)
    {
      /* ??? Can this ever happen on i386? */
      operands[1] = XEXP (XEXP (operands[1], 0), 0);
      asm_add (-size, operands[1]);
      if (GET_MODE (operands[1]) == XFmode)
        operands[1] = gen_rtx_MEM (XFmode, operands[1]);
      else if (GET_MODE (operands[1]) == DFmode)
        operands[1] = gen_rtx_MEM (DFmode, operands[1]);
      else
        operands[1] = gen_rtx_MEM (DImode, operands[1]);
      optype1 = OFFSOP;
    }

  /* If an operand is an unoffsettable memory ref, find a register
     we can increment temporarily to make it refer to the second word.  */

  if (optype0 == MEMOP)
    addreg0 = find_addr_reg (XEXP (operands[0], 0));

  if (optype1 == MEMOP)
    addreg1 = find_addr_reg (XEXP (operands[1], 0));

  /* Ok, we can do one word at a time.
     Normally we do the low-numbered word first,
     but if either operand is autodecrementing then we
     do the high-numbered word first.

     In either case, set up in LATEHALF the operands to use
     for the high-numbered word and in some cases alter the
     operands in OPERANDS to be suitable for the low-numbered word.  */

  /* size == 12 is the three-word long double case; middlehalf is
     word 1 (byte offset 4) and latehalf is word 2 (byte offset 8).  */
  if (size == 12)
    {
      if (optype0 == REGOP)
	{
	  middlehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
	  latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 2);
	}
      else if (optype0 == OFFSOP)
	{
	  middlehalf[0] = adj_offsettable_operand (operands[0], 4);
	  latehalf[0] = adj_offsettable_operand (operands[0], 8);
	}
      else
	{
         middlehalf[0] = operands[0];
         latehalf[0] = operands[0];
	}

      if (optype1 == REGOP)
	{
          middlehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
          latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 2);
	}
      else if (optype1 == OFFSOP)
	{
          middlehalf[1] = adj_offsettable_operand (operands[1], 4);
          latehalf[1] = adj_offsettable_operand (operands[1], 8);
	}
      else if (optype1 == CNSTOP)
	{
	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      REAL_VALUE_TYPE r; long l[3];

	      /* Split the long double constant into its three
		 target-format words.  */
	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
	      operands[1] = GEN_INT (l[0]);
	      middlehalf[1] = GEN_INT (l[1]);
	      latehalf[1] = GEN_INT (l[2]);
	    }
	  else if (CONSTANT_P (operands[1]))
	    /* No non-CONST_DOUBLE constant should ever appear here.  */
	    abort ();
        }
      else
	{
	  middlehalf[1] = operands[1];
	  latehalf[1] = operands[1];
	}
    }

  else
    {
      /* Size is not 12. */

      if (optype0 == REGOP)
	latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
      else if (optype0 == OFFSOP)
	latehalf[0] = adj_offsettable_operand (operands[0], 4);
      else
	latehalf[0] = operands[0];

      if (optype1 == REGOP)
	latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
      else if (optype1 == OFFSOP)
	latehalf[1] = adj_offsettable_operand (operands[1], 4);
      else if (optype1 == CNSTOP)
	split_double (operands[1], &operands[1], &latehalf[1]);
      else
	latehalf[1] = operands[1];
    }

  /* If insn is effectively movd N (sp),-(sp) then we will do the
     high word first.  We should use the adjusted operand 1
     (which is N+4 (sp) or N+8 (sp))
     for the low word and middle word as well,
     to compensate for the first decrement of sp.  */
  if (optype0 == PUSHOP
      && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
      && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
    middlehalf[1] = operands[1] = latehalf[1];

  /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
     if the upper part of reg N does not appear in the MEM, arrange to
     emit the move late-half first.  Otherwise, compute the MEM address
     into the upper part of N and use that as a pointer to the memory
     operand.  */
  if (optype0 == REGOP
      && (optype1 == OFFSOP || optype1 == MEMOP))
    {
      if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
	  && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	{
	  /* If both halves of dest are used in the src memory address,
	     compute the address into latehalf of dest.  */
	compadr:
	  xops[0] = latehalf[0];
	  xops[1] = XEXP (operands[1], 0);
	  output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
	  if (GET_MODE (operands[1]) == XFmode)
	    {
	      /* Rebuild the memory operands relative to the address now
		 held in latehalf[0].  */
	      operands[1] = gen_rtx_MEM (XFmode, latehalf[0]);
	      middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	  else
	    {
	      operands[1] = gen_rtx_MEM (DImode, latehalf[0]);
	      latehalf[1] = adj_offsettable_operand (operands[1], size-4);
	    }
	}

      else if (size == 12
		 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
	{
	  /* Check for two regs used by both source and dest. */
	  if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
		|| reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
	    goto compadr;

	  /* JRV says this can't happen: */
	  if (addreg0 || addreg1)
	      abort ();

	  /* Only the middle reg conflicts; simply put it last. */
	  output_asm_insn (singlemove_string (operands), operands);
	  output_asm_insn (singlemove_string (latehalf), latehalf);
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	  return "";
	}

      else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
	/* If the low half of dest is mentioned in the source memory
	   address, the arrange to emit the move late half first.  */
	dest_overlapped_low = 1;
    }

  /* If one or both operands autodecrementing,
     do the two words, high-numbered first.  */

  /* Likewise,  the first move would clobber the source of the second one,
     do them in the other order.  This happens only for registers;
     such overlap can't happen in memory unless the user explicitly
     sets it up, and that is an undefined circumstance.  */

#if 0
  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && REGNO (operands[0]) == REGNO (latehalf[1]))
      || dest_overlapped_low)
#endif

  if (optype0 == PUSHOP || optype1 == PUSHOP
      || (optype0 == REGOP && optype1 == REGOP
	  && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
	      || REGNO (operands[0]) == REGNO (latehalf[1])))
      || dest_overlapped_low)
    {
      /* Make any unoffsettable addresses point at high-numbered word.  */
      if (addreg0)
	asm_add (size-4, addreg0);
      if (addreg1)
	asm_add (size-4, addreg1);

      /* Do that word.  */
      output_asm_insn (singlemove_string (latehalf), latehalf);

      /* Undo the adds we just did.  */
      if (addreg0)
	asm_add (-4, addreg0);
      if (addreg1)
	asm_add (-4, addreg1);

      if (size == 12)
        {
	  /* Middle word of the three-word case, then step the address
	     registers back down to the low word.  */
	  output_asm_insn (singlemove_string (middlehalf), middlehalf);
	  if (addreg0)
	    asm_add (-4, addreg0);
	  if (addreg1)
	    asm_add (-4, addreg1);
	}

      /* Do low-numbered word.  */
      return singlemove_string (operands);
    }

  /* Normal case: do the two words, low-numbered first.  */

  output_asm_insn (singlemove_string (operands), operands);

  /* Do the middle one of the three words for long double */
  if (size == 12)
    {
      if (addreg0)
        asm_add (4, addreg0);
      if (addreg1)
        asm_add (4, addreg1);

      output_asm_insn (singlemove_string (middlehalf), middlehalf);
    }

  /* Make any unoffsettable addresses point at high-numbered word.  */
  if (addreg0)
    asm_add (4, addreg0);
  if (addreg1)
    asm_add (4, addreg1);

  /* Do that word.  */
  output_asm_insn (singlemove_string (latehalf), latehalf);

  /* Undo the adds we just did.  */
  if (addreg0)
    asm_add (4-size, addreg0);
  if (addreg1)
    asm_add (4-size, addreg1);

  return "";
}
1368
1369#define MAX_TMPS 2		/* max temporary registers used */
1370
1371/* Output the appropriate code to move push memory on the stack */
1372
/* Emit code to push LENGTH bytes of the memory source OPERANDS[1]
   onto the stack.  OPERANDS[TMP_START..N_OPERANDS-1] may hold scratch
   registers; up to MAX_TMPS of those not overlapping the source are
   used to batch load/push pairs.  With no usable scratch register,
   each word is pushed directly from memory.  INSN is used only for
   error reporting.  Always returns "".  */
char *
output_move_pushmem (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  struct
    {
      char *load;		/* template to load scratch from memory */
      char *push;		/* template to push the scratch */
      rtx   xops[2];		/* [0] = memory word, [1] = scratch reg */
    } tmp_info[MAX_TMPS];

  rtx src = operands[1];
  int max_tmps = 0;
  int offset = 0;
  /* If SRC's address mentions %esp, every push moves the data, so
     subsequent source offsets must be biased by STACK_OFFSET.  */
  int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
  int stack_offset = 0;
  int i, num_tmps;
  rtx xops[1];

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if ((length & 3) != 0)
    fatal_insn ("Pushing non-word aligned size", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* A scratch that is part of the source would be clobbered
	     by the load; skip it.  */
	  if (reg_overlap_mentioned_p (operands[i], src))
	    continue;

	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  /* No scratch available: push each word straight from memory,
     highest offset first so the low word ends up at the top.  */
  if (max_tmps == 0)
    for (offset = length - 4; offset >= 0; offset -= 4)
      {
	xops[0] = adj_offsettable_operand (src, offset + stack_offset);
	output_asm_insn (AS1(push%L0,%0), xops);
	if (stack_p)
	  stack_offset += 4;
      }

  else
    for (offset = length - 4; offset >= 0; )
      {
	/* Batch up to MAX_TMPS load/push pairs: all loads first, then
	   all pushes, so the loads still see the pre-push offsets.  */
	for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
	  {
	    tmp_info[num_tmps].load    = AS2(mov%L0,%0,%1);
	    tmp_info[num_tmps].push    = AS1(push%L0,%1);
	    tmp_info[num_tmps].xops[0]
	      = adj_offsettable_operand (src, offset + stack_offset);
	    offset -= 4;
	  }

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

	for (i = 0; i < num_tmps; i++)
	  output_asm_insn (tmp_info[i].push, tmp_info[i].xops);

	if (stack_p)
	  stack_offset += 4*num_tmps;
      }

  return "";
}
1449
1450/* Output the appropriate code to move data between two memory locations */
1451
/* Emit code to copy LENGTH bytes from memory OPERANDS[1] to memory
   OPERANDS[0] via scratch registers found in
   OPERANDS[TMP_START..N_OPERANDS-1].  A push to the stack is handed
   off to output_move_pushmem.  Words are copied 4 (then 2, then 1)
   bytes at a time; an odd final byte requires a byte-addressable
   (QImode-capable) scratch.  INSN is used only for error reporting.
   Always returns "".  */
char *
output_move_memory (operands, insn, length, tmp_start, n_operands)
     rtx operands[];
     rtx insn;
     int length;
     int tmp_start;
     int n_operands;
{
  struct
    {
      char *load;		/* template to load scratch from source */
      char *store;		/* template to store scratch to dest */
      rtx   xops[3];		/* [0] = dest, [1] = src, [2] = scratch */
    } tmp_info[MAX_TMPS];

  rtx dest = operands[0];
  rtx src  = operands[1];
  rtx qi_tmp = NULL_RTX;	/* byte-capable scratch for the odd byte */
  int max_tmps = 0;
  int offset = 0;
  int i, num_tmps;
  rtx xops[3];

  /* A store through (pre_inc %esp) is really a push.  */
  if (GET_CODE (dest) == MEM
      && GET_CODE (XEXP (dest, 0)) == PRE_INC
      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
    return output_move_pushmem (operands, insn, length, tmp_start, n_operands);

  if (! offsettable_memref_p (src))
    fatal_insn ("Source is not offsettable", insn);

  if (! offsettable_memref_p (dest))
    fatal_insn ("Destination is not offsettable", insn);

  /* Figure out which temporary registers we have available */
  for (i = tmp_start; i < n_operands; i++)
    {
      if (GET_CODE (operands[i]) == REG)
	{
	  /* Remember the first QImode-capable reg for the odd byte.  */
	  if ((length & 1) != 0 && qi_tmp == 0 && QI_REG_P (operands[i]))
	    qi_tmp = operands[i];

	  if (reg_overlap_mentioned_p (operands[i], dest))
	    fatal_insn ("Temporary register overlaps the destination", insn);

	  if (reg_overlap_mentioned_p (operands[i], src))
	    fatal_insn ("Temporary register overlaps the source", insn);

	  tmp_info[max_tmps++].xops[2] = operands[i];
	  if (max_tmps == MAX_TMPS)
	    break;
	}
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves",
		insn);

  if ((length & 1) != 0)
    {
      if (qi_tmp == 0)
	fatal_insn ("No byte register found when moving odd # of bytes.",
		    insn);
    }

  /* Copy in batches: fill up to MAX_TMPS scratch regs with loads,
     then flush them all with stores.  */
  while (length > 1)
    {
      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load    = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store   = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 4;
	      length -= 4;
	    }

	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load    = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store   = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);

	      offset += 2;
	      length -= 2;
	    }
	  else
	    break;
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
    }

  /* Final odd byte, moved through the QImode scratch.  */
  if (length == 1)
    {
      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);
      xops[2] = qi_tmp;
      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
    }

  return "";
}
1568
/* Classify the CONST_DOUBLE X as an 80387 "standard" constant:
   return 1 for +0.0 (loadable with fldz), 2 for 1.0 (fld1),
   0 for anything else.  */
int
standard_80387_constant_p (x)
     rtx x;
{
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  REAL_VALUE_TYPE d;
  jmp_buf handler;
  int is0, is1;

  /* Bracket the REAL_VALUE operations with a float handler: if they
     trap, control longjmps back here and we conservatively answer
     "not a standard constant".  */
  if (setjmp (handler))
    return 0;

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  /* -0.0 must not match, since fldz loads +0.0.  */
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  if (is0)
    return 1;

  if (is1)
    return 2;

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
#endif

  return 0;
}
1600
1601char *
1602output_move_const_single (operands)
1603     rtx *operands;
1604{
1605  if (FP_REG_P (operands[0]))
1606    {
1607      int conval = standard_80387_constant_p (operands[1]);
1608
1609      if (conval == 1)
1610	return "fldz";
1611
1612      if (conval == 2)
1613	return "fld1";
1614    }
1615
1616  if (GET_CODE (operands[1]) == CONST_DOUBLE)
1617    {
1618      REAL_VALUE_TYPE r; long l;
1619
1620      if (GET_MODE (operands[1]) == XFmode)
1621	abort ();
1622
1623      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1624      REAL_VALUE_TO_TARGET_SINGLE (r, l);
1625      operands[1] = GEN_INT (l);
1626    }
1627
1628  return singlemove_string (operands);
1629}
1630
1631/* Returns 1 if OP is either a symbol reference or a sum of a symbol
1632   reference and a constant.  */
1633
1634int
1635symbolic_operand (op, mode)
1636     register rtx op;
1637     enum machine_mode mode ATTRIBUTE_UNUSED;
1638{
1639  switch (GET_CODE (op))
1640    {
1641    case SYMBOL_REF:
1642    case LABEL_REF:
1643      return 1;
1644
1645    case CONST:
1646      op = XEXP (op, 0);
1647      return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1648	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1649	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
1650
1651    default:
1652      return 0;
1653    }
1654}
1655
1656/* Test for a valid operand for a call instruction.
1657   Don't allow the arg pointer register or virtual regs
1658   since they may change into reg + const, which the patterns
1659   can't handle yet.  */
1660
1661int
1662call_insn_operand (op, mode)
1663     rtx op;
1664     enum machine_mode mode ATTRIBUTE_UNUSED;
1665{
1666  if (GET_CODE (op) == MEM
1667      && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1668	   /* This makes a difference for PIC.  */
1669	   && general_operand (XEXP (op, 0), Pmode))
1670	  || (GET_CODE (XEXP (op, 0)) == REG
1671	      && XEXP (op, 0) != arg_pointer_rtx
1672	      && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1673		    && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1674    return 1;
1675
1676  return 0;
1677}
1678
1679/* Like call_insn_operand but allow (mem (symbol_ref ...))
1680   even if pic.  */
1681
1682int
1683expander_call_insn_operand (op, mode)
1684     rtx op;
1685     enum machine_mode mode ATTRIBUTE_UNUSED;
1686{
1687  if (GET_CODE (op) == MEM
1688      && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1689	  || (GET_CODE (XEXP (op, 0)) == REG
1690	      && XEXP (op, 0) != arg_pointer_rtx
1691	      && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1692		    && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1693    return 1;
1694
1695  return 0;
1696}
1697
1698/* Return 1 if OP is a comparison operator that can use the condition code
1699   generated by an arithmetic operation. */
1700
1701int
1702arithmetic_comparison_operator (op, mode)
1703     register rtx op;
1704     enum machine_mode mode;
1705{
1706  enum rtx_code code;
1707
1708  if (mode != VOIDmode && mode != GET_MODE (op))
1709    return 0;
1710
1711  code = GET_CODE (op);
1712  if (GET_RTX_CLASS (code) != '<')
1713    return 0;
1714
1715  return (code != GT && code != LE);
1716}
1717
1718int
1719ix86_logical_operator (op, mode)
1720     register rtx op;
1721     enum machine_mode mode ATTRIBUTE_UNUSED;
1722{
1723  return GET_CODE (op) == AND || GET_CODE (op) == IOR || GET_CODE (op) == XOR;
1724}
1725
1726
1727/* Returns 1 if OP contains a symbol reference */
1728
1729int
1730symbolic_reference_mentioned_p (op)
1731     rtx op;
1732{
1733  register char *fmt;
1734  register int i;
1735
1736  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1737    return 1;
1738
1739  fmt = GET_RTX_FORMAT (GET_CODE (op));
1740  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1741    {
1742      if (fmt[i] == 'E')
1743	{
1744	  register int j;
1745
1746	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1747	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1748	      return 1;
1749	}
1750
1751      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1752	return 1;
1753    }
1754
1755  return 0;
1756}
1757
1758/* Attempt to expand a binary operator.  Make the expansion closer to the
1759   actual machine, then just general_operand, which will allow 3 separate
1760   memory references (one output, two input) in a single insn.  Return
1761   whether the insn fails, or succeeds.  */
1762
/* Expand a binary operator CODE in mode MODE: canonicalize the
   operands and copy memory/constant operands into registers as needed
   so that at most one memory reference remains, then report whether
   a valid insn can be made.  Returns TRUE on success, FALSE if the
   caller must fall back.  May replace entries of OPERANDS.  */
int
ix86_expand_binary_operator (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[];
{
  int modified;

  /* Recognize <var1> = <value> <op> <var1> for commutative operators */
  if (GET_RTX_CLASS (code) == 'c'
      && (rtx_equal_p (operands[0], operands[2])
	  || immediate_operand (operands[1], mode)))
    {
      rtx temp = operands[1];
      operands[1] = operands[2];
      operands[2] = temp;
    }

  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO && optimize
      && ((reload_in_progress | reload_completed) == 0))
    {
      /* Don't copy a MEM that is also the destination: keeping them
	 identical lets the matching constraint apply.  */
      if (GET_CODE (operands[1]) == MEM
	  && ! rtx_equal_p (operands[0], operands[1]))
	operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

      if (GET_CODE (operands[2]) == MEM)
	operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);

      /* A constant first operand of a subtraction cannot be swapped
	 into place; load it into a register instead.  */
      if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	{
	  rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	  emit_move_insn (temp, operands[1]);
	  operands[1] = temp;
	  return TRUE;
	}
    }

  if (!ix86_binary_operator_ok (code, mode, operands))
    {
      /* If not optimizing, try to make a valid insn (optimize code
	 previously did this above to improve chances of CSE) */

      if ((! TARGET_PSEUDO || !optimize)
	  && ((reload_in_progress | reload_completed) == 0)
	  && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
	{
	  /* Same fixups as the optimizing path above, tracking whether
	     anything actually changed so we know to re-check.  */
	  modified = FALSE;
	  if (GET_CODE (operands[1]) == MEM
	      && ! rtx_equal_p (operands[0], operands[1]))
	    {
	      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[2]) == MEM)
	    {
	      operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	      emit_move_insn (temp, operands[1]);
	      operands[1] = temp;
	      return TRUE;
	    }

	  if (modified && ! ix86_binary_operator_ok (code, mode, operands))
	    return FALSE;
	}
      else
	return FALSE;
    }

  return TRUE;
}
1843
1844/* Return TRUE or FALSE depending on whether the binary operator meets the
1845   appropriate constraints.  */
1846
1847int
1848ix86_binary_operator_ok (code, mode, operands)
1849     enum rtx_code code;
1850     enum machine_mode mode ATTRIBUTE_UNUSED;
1851     rtx operands[3];
1852{
1853  return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1854    && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1855}
1856
1857/* Attempt to expand a unary operator.  Make the expansion closer to the
1858   actual machine, then just general_operand, which will allow 2 separate
1859   memory references (one output, one input) in a single insn.  Return
1860   whether the insn fails, or succeeds.  */
1861
1862int
1863ix86_expand_unary_operator (code, mode, operands)
1864     enum rtx_code code;
1865     enum machine_mode mode;
1866     rtx operands[];
1867{
1868  /* If optimizing, copy to regs to improve CSE */
1869  if (TARGET_PSEUDO
1870      && optimize
1871      && ((reload_in_progress | reload_completed) == 0)
1872      && GET_CODE (operands[1]) == MEM)
1873    operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1874
1875  if (! ix86_unary_operator_ok (code, mode, operands))
1876    {
1877      if ((! TARGET_PSEUDO || optimize == 0)
1878	  && ((reload_in_progress | reload_completed) == 0)
1879	  && GET_CODE (operands[1]) == MEM)
1880	{
1881	  operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1882	  if (! ix86_unary_operator_ok (code, mode, operands))
1883	    return FALSE;
1884	}
1885      else
1886	return FALSE;
1887    }
1888
1889  return TRUE;
1890}
1891
1892/* Return TRUE or FALSE depending on whether the unary operator meets the
1893   appropriate constraints.  */
1894
/* Unary operators impose no extra operand constraints on the i386;
   anything the predicates accepted is fine.  */
int
ix86_unary_operator_ok (code, mode, operands)
     enum rtx_code code ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx operands[2] ATTRIBUTE_UNUSED;
{
  return TRUE;
}
1903
/* Label of the -fpic "get PC" thunk shared by asm_output_function_prefix
   and load_pic_register; 0 when no label has been generated yet.  */
static rtx pic_label_rtx;
/* Assembler name of that label ("LPR<n>" internal label).  */
static char pic_label_name [256];
/* Counter used to make each generated "LPR" label unique.  */
static int pic_label_no = 0;
1907
1908/* This function generates code for -fpic that loads %ebx with
1909   the return address of the caller and then returns.  */
1910
void
asm_output_function_prefix (file, name)
     FILE *file;
     char *name ATTRIBUTE_UNUSED;
{
  rtx xops[2];
  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* Deep branch prediction favors having a return for every call. */
  if (PIC_REG_USED && TARGET_DEEP_BRANCH_PREDICTION)
    {
      tree prologue_node;

      /* Create the shared "LPR" label on first use; load_pic_register
	 emits the matching call.  */
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      prologue_node = make_node (FUNCTION_DECL);
      DECL_RESULT (prologue_node) = 0;

      /* This used to call ASM_DECLARE_FUNCTION_NAME() but since it's an
	 internal (non-global) label that's being emitted, it didn't make
	 sense to have .type information for local labels.   This caused
	 the SCO OpenServer 5.0.4 ELF assembler grief (why are you giving
  	 me debug info for a label that you're declaring non-global?) this
	 was changed to call ASM_OUTPUT_LABEL() instead. */


      /* Emit the thunk itself: copy the return address (at the top of
	 the stack) into the PIC register, then return.  */
      ASM_OUTPUT_LABEL (file, pic_label_name);
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
}
1948
1949/* Generate the assembly code for function entry.
1950   FILE is an stdio stream to output the code to.
1951   SIZE is an int: how many units of temporary storage to allocate. */
1952
1953void
1954function_prologue (file, size)
1955     FILE *file ATTRIBUTE_UNUSED;
1956     int size ATTRIBUTE_UNUSED;
1957{
1958  if (TARGET_SCHEDULE_PROLOGUE)
1959    {
1960      pic_label_rtx = 0;
1961      return;
1962    }
1963
1964  ix86_prologue (0);
1965}
1966
1967/* Expand the prologue into a bunch of separate insns. */
1968
1969void
1970ix86_expand_prologue ()
1971{
1972  if (! TARGET_SCHEDULE_PROLOGUE)
1973      return;
1974
1975  ix86_prologue (1);
1976}
1977
/* Emit the sequence that loads the PIC register (%ebx) with the
   address of the GOT.  With DO_RTL nonzero emit RTL insns; otherwise
   print assembler text directly.  */
void
load_pic_register (do_rtl)
     int do_rtl;
{
  rtx xops[4];

  /* Deep-branch-prediction CPUs: call a shared thunk (emitted by
     asm_output_function_prefix) that returns the PC, keeping the
     call/return stack balanced.  */
  if (TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;
      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      xops[1] = gen_rtx_MEM (QImode,
			 gen_rtx (SYMBOL_REF, Pmode,
				  LABEL_NAME (pic_label_rtx)));

      if (do_rtl)
	{
	  emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
	  emit_insn (gen_prologue_set_got (xops[0],
					   gen_rtx (SYMBOL_REF, Pmode,
						    "$_GLOBAL_OFFSET_TABLE_"),
					   xops[1]));
	}
      else
	{
	  /* call thunk; add the GOT displacement to the returned PC.  */
	  output_asm_insn (AS1 (call,%X1), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
	  pic_label_rtx = 0;
	}
    }

  else
    {
      /* Classic sequence: call the next instruction, pop the return
	 address, add the GOT displacement relative to that label.  */
      xops[0] = pic_offset_table_rtx;
      xops[1] = gen_label_rtx ();

      if (do_rtl)
	{
	  /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
	     a new CODE_LABEL after reload, so we need a single pattern to
	     emit the 3 necessary instructions.  */
	  emit_insn (gen_prologue_get_pc_and_set_got (xops[0]));
	}
      else
	{
	  output_asm_insn (AS1 (call,%P1), xops);
	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
				     CODE_LABEL_NUMBER (xops[1]));
	  output_asm_insn (AS1 (pop%L0,%0), xops);
	  output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
	}
    }

  /* When -fpic, we must emit a scheduling barrier, so that the instruction
     that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
     moved before any instruction which implicitly uses the got.   */

  if (do_rtl)
    emit_insn (gen_blockage ());
}
2043
/* Emit the function prologue, as RTL when DO_RTL is nonzero and as
   assembly text via output_asm_insn otherwise.

   The prologue: saves and sets up the frame pointer when needed,
   allocates TSIZE bytes of frame (calling _alloca to probe the stack
   when TARGET_STACK_PROBE is set and the frame is large), pushes the
   live call-saved registers, loads the PIC register when PIC code
   needs it, and emits a scheduling blockage for profiling.

   When emitting text, dwarf2 call-frame info is produced in parallel:
   CFA_OFFSET tracks the CFA relative to the incoming stack pointer,
   CFA_STORE_OFFSET tracks where register saves land.  */

static void
ix86_prologue (do_rtl)
     int do_rtl;
{
  register int regno;
  int limit;
  rtx xops[4];
  int pic_reg_used = PIC_REG_USED;
  long tsize = get_frame_size ();
  rtx insn;
  int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;

  xops[0] = stack_pointer_rtx;
  xops[1] = frame_pointer_rtx;
  xops[2] = GEN_INT (tsize);

  if (frame_pointer_needed)
    {
      if (do_rtl)
	{
	  /* push %ebp, written as a store through a pre-decremented
	     stack pointer; then %ebp = %esp.  Both insns are marked
	     frame-related so dwarf2 unwind info is generated.  */
	  insn = emit_insn (gen_rtx (SET, VOIDmode,
				     gen_rtx_MEM (SImode,
					      gen_rtx (PRE_DEC, SImode,
						       stack_pointer_rtx)),
				     frame_pointer_rtx));

	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_move_insn (xops[1], xops[0]);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      else
	{
	  output_asm_insn ("push%L1 %1", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
 	  if (dwarf2out_do_frame ())
 	    {
	      /* The push moved the CFA down by a word and stored the
		 old frame pointer there.  */
 	      char *l = dwarf2out_cfi_label ();

 	      cfa_store_offset += 4;
 	      cfa_offset = cfa_store_offset;
 	      dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
 	      dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, - cfa_store_offset);
 	    }
#endif

	  output_asm_insn (AS2 (mov%L0,%0,%1), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
	  /* From here on the CFA is computed from the frame pointer.  */
 	  if (dwarf2out_do_frame ())
 	    dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
#endif
	}
    }

  if (tsize == 0)
    ;
  else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
    {
      /* Small frame, or no stack probing: a plain subtract suffices.  */
      if (do_rtl)
	{
	  insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  output_asm_insn (AS2 (sub%L0,%2,%0), xops);
#ifdef INCOMING_RETURN_ADDR_RTX
 	  if (dwarf2out_do_frame ())
 	    {
 	      cfa_store_offset += tsize;
 	      if (! frame_pointer_needed)
 		{
 		  cfa_offset = cfa_store_offset;
 		  dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
 		}
 	    }
#endif
	}
    }
  else
    {
      /* Large frame with -mstack-probe: call _alloca so every page of
	 the new frame is touched.  The size is passed in %eax (reg 0).  */
      xops[3] = gen_rtx_REG (SImode, 0);
      if (do_rtl)
      emit_move_insn (xops[3], xops[2]);
      else
	output_asm_insn (AS2 (mov%L0,%2,%3), xops);

      xops[3] = gen_rtx_MEM (FUNCTION_MODE,
			 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));

      if (do_rtl)
	emit_call_insn (gen_rtx (CALL, VOIDmode, xops[3], const0_rtx));
      else
	output_asm_insn (AS1 (call,%P3), xops);
    }

  /* `enter %2,$0' could replace the push/mov/sub sequence above (note
     its operands are NOT reversed from Intel order), but `enter' is
     slower and sdb does not like it, so it is not used.  */

  /* Push every call-saved register that is live, plus the PIC register
     when it is in use.  Only hard registers below the frame/stack
     pointer are candidates.  */
  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      {
	xops[0] = gen_rtx_REG (SImode, regno);
	if (do_rtl)
	  {
	    insn = emit_insn (gen_rtx (SET, VOIDmode,
				       gen_rtx_MEM (SImode,
						gen_rtx (PRE_DEC, SImode,
							 stack_pointer_rtx)),
				       xops[0]));

	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
	else
	  {
	    output_asm_insn ("push%L0 %0", xops);
#ifdef INCOMING_RETURN_ADDR_RTX
 	    if (dwarf2out_do_frame ())
 	      {
 		char *l = dwarf2out_cfi_label ();

 		cfa_store_offset += 4;
		/* With a frame pointer the CFA is already fixed, so
		   only the save location needs recording.  */
 		if (! frame_pointer_needed)
 		  {
 		    cfa_offset = cfa_store_offset;
 		    dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
 		  }

 		dwarf2out_reg_save (l, regno, - cfa_store_offset);
 	      }
#endif
 	  }
      }

  if (pic_reg_used)
    load_pic_register (do_rtl);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  However, if -fpic, the above call will have
     done that.  */
  if ((profile_flag || profile_block_flag)
      && ! pic_reg_used && do_rtl)
    emit_insn (gen_blockage ());
}
2197
2198/* Return 1 if it is appropriate to emit `ret' instructions in the
2199   body of a function.  Do this only if the epilogue is simple, needing a
2200   couple of insns.  Prior to reloading, we can't tell how many registers
2201   must be saved, so return 0 then.  Return 0 if there is no frame
2202   marker to de-allocate.
2203
2204   If NON_SAVING_SETJMP is defined and true, then it is not possible
2205   for the epilogue to be simple, so return 0.  This is a special case
2206   since NON_SAVING_SETJMP will not cause regs_ever_live to change
2207   until final, but jump_optimize may need to know sooner if a
2208   `return' is OK.  */
2209
2210int
2211ix86_can_use_return_insn_p ()
2212{
2213  int regno;
2214  int nregs = 0;
2215  int reglimit = (frame_pointer_needed
2216		  ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2217
2218#ifdef NON_SAVING_SETJMP
2219  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2220    return 0;
2221#endif
2222
2223  if (! reload_completed)
2224    return 0;
2225
2226  for (regno = reglimit - 1; regno >= 0; regno--)
2227    if ((regs_ever_live[regno] && ! call_used_regs[regno])
2228	|| (regno == PIC_OFFSET_TABLE_REGNUM && PIC_REG_USED))
2229      nregs++;
2230
2231  return nregs == 0 || ! frame_pointer_needed;
2232}
2233
2234/* This function generates the assembly code for function exit.
2235   FILE is an stdio stream to output the code to.
2236   SIZE is an int: how many units of temporary storage to deallocate. */
2237
2238void
2239function_epilogue (file, size)
2240     FILE *file ATTRIBUTE_UNUSED;
2241     int size ATTRIBUTE_UNUSED;
2242{
2243    return;
2244}
2245
2246/* Restore function stack, frame, and registers. */
2247
void
ix86_expand_epilogue ()
{
  /* Emit the epilogue as RTL (do_rtl == 1) rather than as text.  */
  ix86_epilogue (1);
}
2253
/* Emit the function epilogue, as RTL when DO_RTL is nonzero and as
   assembly text otherwise.  Restores the call-saved registers pushed
   by the prologue, tears down the frame, releases callee-popped
   argument space, and emits the return (plain `ret', `ret $n', or an
   indirect jump when more than 32K bytes of arguments must be
   popped).  */

static void
ix86_epilogue (do_rtl)
     int do_rtl;
{
  register int regno;
  register int nregs, limit;
  int offset;
  rtx xops[3];
  int pic_reg_used = PIC_REG_USED;
  long tsize = get_frame_size ();

  /* Compute the number of registers to pop, mirroring the save loop in
     ix86_prologue so both agree on the set of saved registers.  */

  limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);

  nregs = 0;

  for (regno = limit - 1; regno >= 0; regno--)
    if ((regs_ever_live[regno] && ! call_used_regs[regno])
	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
      nregs++;

  /* sp is often unreliable so we must go off the frame pointer.

     In reality, we may not care if sp is unreliable, because we can restore
     the register relative to the frame pointer.  In theory, since each move
     is the same speed as a pop, and we don't need the leal, this is faster.
     For now restore multiple registers the old way. */

  /* Offset (from %ebp) of the lowest saved register slot.  */
  offset = - tsize - (nregs * UNITS_PER_WORD);

  xops[2] = stack_pointer_rtx;

  /* When -fpic, we must emit a scheduling barrier, so that the instruction
     that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
     moved before any instruction which implicitly uses the got.  This
     includes any instruction which uses a SYMBOL_REF or a LABEL_REF.

     Alternatively, this could be fixed by making the dependence on the
     PIC_OFFSET_TABLE_REGNUM explicit in the RTL.  */

  if (flag_pic || profile_flag || profile_block_flag)
    emit_insn (gen_blockage ());

  if (nregs > 1 || ! frame_pointer_needed)
    {
      /* Point %esp at the saved registers, then pop them in order.  */
      if (frame_pointer_needed)
	{
	  xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
	  if (do_rtl)
	    emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
	  else
	    output_asm_insn (AS2 (lea%L2,%0,%2), xops);
	}

      for (regno = 0; regno < limit; regno++)
	if ((regs_ever_live[regno] && ! call_used_regs[regno])
	    || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	  {
	    xops[0] = gen_rtx_REG (SImode, regno);

	    if (do_rtl)
	      emit_insn (gen_pop (xops[0]));
	    else
	      output_asm_insn ("pop%L0 %0", xops);
	  }
    }

  else
    /* Zero or one register with a frame pointer: restore via moves
       relative to %ebp instead of adjusting %esp.  */
    for (regno = 0; regno < limit; regno++)
      if ((regs_ever_live[regno] && ! call_used_regs[regno])
	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
	{
	  xops[0] = gen_rtx_REG (SImode, regno);
	  xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);

	  if (do_rtl)
	    emit_move_insn (xops[0], xops[1]);
	  else
	    output_asm_insn (AS2 (mov%L0,%1,%0), xops);

	  offset += 4;
	}

  if (frame_pointer_needed)
    {
      /* If not an i386, mov & pop is faster than "leave". */

      if (TARGET_USE_LEAVE)
	{
	  if (do_rtl)
	    emit_insn (gen_leave());
	  else
	    output_asm_insn ("leave", xops);
	}
      else
	{
	  /* Equivalent of leave: %esp = %ebp; pop %ebp.  */
	  xops[0] = frame_pointer_rtx;
	  xops[1] = stack_pointer_rtx;

	  if (do_rtl)
	    {
	      emit_insn (gen_epilogue_set_stack_ptr());
	      emit_insn (gen_pop (xops[0]));
	    }
	  else
	    {
	      output_asm_insn (AS2 (mov%L2,%0,%2), xops);
	      output_asm_insn ("pop%L0 %0", xops);
	    }
	}
    }

  else if (tsize)
    {
      /* Intel's docs say that for 4 or 8 bytes of stack frame one should
	 use `pop' and not `add'.  */
      int use_pop = tsize == 4;

      /* Use two pops only for the Pentium processors.  */
      if (tsize == 8 && !TARGET_386 && !TARGET_486)
	{
	  rtx retval = current_function_return_rtx;

	  xops[1] = gen_rtx_REG (SImode, 1);	/* %edx */

	  /* This case is a bit more complex.  Since we cannot pop into
	     %ecx twice we need a second register.  But this is only
	     available if the return value is not of DImode in which
	     case the %edx register is not available.  */
	  use_pop = (retval == NULL
		     || ! reg_overlap_mentioned_p (xops[1], retval));
	}

      if (use_pop)
	{
	  xops[0] = gen_rtx_REG (SImode, 2);	/* %ecx */

	  if (do_rtl)
	    {
	      /* We have to prevent the two pops here from being scheduled.
		 GCC otherwise would try in some situation to put other
		 instructions in between them which has a bad effect.  */
	      emit_insn (gen_blockage ());
	      emit_insn (gen_pop (xops[0]));
	      if (tsize == 8)
		emit_insn (gen_pop (xops[1]));
	    }
	  else
	    {
	      output_asm_insn ("pop%L0 %0", xops);
	      if (tsize == 8)
		output_asm_insn ("pop%L1 %1", xops);
	    }
	}
      else
	{
	  /* If there is no frame pointer, we must still release the frame. */
	  xops[0] = GEN_INT (tsize);

	  if (do_rtl)
	    emit_insn (gen_rtx (SET, VOIDmode, xops[2],
				gen_rtx (PLUS, SImode, xops[2], xops[0])));
	  else
	    output_asm_insn (AS2 (add%L2,%0,%2), xops);
	}
    }

#ifdef FUNCTION_BLOCK_PROFILER_EXIT
  /* NOTE(review): `file' is not declared in this function; this only
     compiles when FUNCTION_BLOCK_PROFILER_EXIT ignores its argument or
     is not defined for the target -- confirm.  */
  if (profile_block_flag == 2)
    {
      FUNCTION_BLOCK_PROFILER_EXIT(file);
    }
#endif

  if (current_function_pops_args && current_function_args_size)
    {
      xops[1] = GEN_INT (current_function_pops_args);

      /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
	 asked to pop more, pop return address, do explicit add, and jump
	 indirectly to the caller. */

      if (current_function_pops_args >= 32768)
	{
	  /* ??? Which register to use here? */
	  xops[0] = gen_rtx_REG (SImode, 2);

	  if (do_rtl)
	    {
	      emit_insn (gen_pop (xops[0]));
	      emit_insn (gen_rtx (SET, VOIDmode, xops[2],
				  gen_rtx (PLUS, SImode, xops[1], xops[2])));
	      /* NOTE(review): this passes a bare REG to emit_jump_insn;
		 presumably relies on a matching indirect-jump pattern --
		 confirm against i386.md.  */
	      emit_jump_insn (xops[0]);
	    }
	  else
	    {
	      output_asm_insn ("pop%L0 %0", xops);
	      output_asm_insn (AS2 (add%L2,%1,%2), xops);
	      output_asm_insn ("jmp %*%0", xops);
	    }
	}
      else
	{
	  if (do_rtl)
	    emit_jump_insn (gen_return_pop_internal (xops[1]));
	  else
	    output_asm_insn ("ret %1", xops);
	}
    }
  else
    {
      if (do_rtl)
	emit_jump_insn (gen_return_internal ());
      else
	output_asm_insn ("ret", xops);
    }
}
2472
2473/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2474   that is a valid memory address for an instruction.
2475   The MODE argument is the machine mode for the MEM expression
2476   that wants to use this address.
2477
2478   On x86, legitimate addresses are:
2479	base				movl (base),reg
2480	displacement			movl disp,reg
2481	base + displacement		movl disp(base),reg
2482	index + base			movl (base,index),reg
2483	(index + base) + displacement	movl disp(base,index),reg
2484	index*scale			movl (,index,scale),reg
2485	index*scale + disp		movl disp(,index,scale),reg
2486	index*scale + base 		movl (base,index,scale),reg
2487	(index*scale + base) + disp	movl disp(base,index,scale),reg
2488
2489	In each case, scale can be 1, 2, 4, 8.  */
2490
2491/* This is exactly the same as print_operand_addr, except that
2492   it recognizes addresses instead of printing them.
2493
2494   It only recognizes address in canonical form.  LEGITIMIZE_ADDRESS should
2495   convert common non-canonical forms to canonical form so that they will
2496   be recognized.  */
2497
/* Debugging helper for the address-recognition code below: when
   -mdebug-addr is in effect (TARGET_DEBUG_ADDR), print MSG to stderr
   and dump the offending rtx INSN; otherwise do nothing.  */
#define ADDR_INVALID(msg,insn)						\
do {									\
  if (TARGET_DEBUG_ADDR)						\
    {									\
      fprintf (stderr, msg);						\
      debug_rtx (insn);							\
    }									\
} while (0)
2506
/* Return nonzero (TRUE) when ADDR is a valid x86 memory address of the
   canonical form disp(base,index,scale) for an access of MODE.  When
   STRICT is nonzero, base/index must be hard registers acceptable to
   the strict REG_OK_FOR_* predicates (post-reload checking).  */
int
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode;
     register rtx addr;
     int strict;
{
  /* Decompose ADDR into the four address components; any of them may
     remain null.  */
  rtx base  = NULL_RTX;
  rtx indx  = NULL_RTX;
  rtx scale = NULL_RTX;
  rtx disp  = NULL_RTX;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr,
	       "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
	       GET_MODE_NAME (mode), strict);

      debug_rtx (addr);
    }

  if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
      base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      /* Only canonical PLUS shapes are recognized here; LEGITIMIZE_ADDRESS
	 is expected to have rewritten other forms.  */
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == SUBREG)
	{
	  if (code1 == REG || code1 == SUBREG)
	    {
	      indx = op0;	/* index + base */
	      base = op1;
	    }

	  else
	    {
	      base = op0;	/* base + displacement */
	      disp = op1;
	    }
	}

      else if (code0 == MULT)
	{
	  indx  = XEXP (op0, 0);
	  scale = XEXP (op0, 1);

	  if (code1 == REG || code1 == SUBREG)
	    base = op1;		/* index*scale + base */

	  else
	    disp = op1;		/* index*scale + disp */
	}

      else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
	{
	  indx  = XEXP (XEXP (op0, 0), 0);	/* index*scale + base + disp */
	  scale = XEXP (XEXP (op0, 0), 1);
	  base  = XEXP (op0, 1);
	  disp  = op1;
	}

      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);	/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else
	{
	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
	  return FALSE;
	}
    }

  else if (GET_CODE (addr) == MULT)
    {
      indx  = XEXP (addr, 0);	/* index*scale */
      scale = XEXP (addr, 1);
    }

  else
    disp = addr;		/* displacement */

  /* Allow arg pointer and stack pointer as index if there is not scaling;
     %esp cannot be an index on x86, but swapping the roles makes the
     address encodable.  */
  if (base && indx && !scale
      && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate base register:

     Don't allow SUBREG's here, it can lead to spill failures when the base
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */

  if (base)
    {
      if (GET_CODE (base) != REG)
	{
	  ADDR_INVALID ("Base is not a register.\n", base);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
	  || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
	{
	  ADDR_INVALID ("Base is not valid.\n", base);
	  return FALSE;
	}
    }

  /* Validate index register:

     Don't allow SUBREG's here, it can lead to spill failures when the index
     is one word out of a two word structure, which is represented internally
     as a DImode int.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	{
	  ADDR_INVALID ("Index is not a register.\n", indx);
	  return FALSE;
	}

      if ((strict && ! REG_OK_FOR_INDEX_STRICT_P (indx))
	  || (! strict && ! REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
	{
	  ADDR_INVALID ("Index is not valid.\n", indx);
	  return FALSE;
	}
    }
  else if (scale)
    abort ();			/* scale w/o index invalid */

  /* Validate scale factor: must be a CONST_INT of 1, 2, 4 or 8 -- the
     only multipliers the SIB byte can encode.  */
  if (scale)
    {
      HOST_WIDE_INT value;

      if (GET_CODE (scale) != CONST_INT)
	{
	  ADDR_INVALID ("Scale is not valid.\n", scale);
	  return FALSE;
	}

      value = INTVAL (scale);
      if (value != 1 && value != 2 && value != 4 && value != 8)
	{
	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
	  return FALSE;
	}
    }

  /* Validate displacement
     Constant pool addresses must be handled special.  They are
     considered legitimate addresses, but only if not used with regs.
     When printed, the output routines know to print the reference with the
     PIC reg, even though the PIC reg doesn't appear in the RTL. */
  if (disp)
    {
      if (GET_CODE (disp) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (disp)
	  && base == 0
	  && indx == 0)
	;

      else if (!CONSTANT_ADDRESS_P (disp))
	{
	  ADDR_INVALID ("Displacement is not valid.\n", disp);
	  return FALSE;
	}

      else if (GET_CODE (disp) == CONST_DOUBLE)
	{
	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
	  return FALSE;
	}

      /* Under -fpic a bare symbolic displacement is only valid when the
	 PIC register takes part in the address (as base, or as unscaled
	 index).  */
      else if (flag_pic && SYMBOLIC_CONST (disp)
	       && base != pic_offset_table_rtx
	       && (indx != pic_offset_table_rtx || scale != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
	  return FALSE;
	}

      else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
	       && (base != NULL_RTX || indx != NULL_RTX))
	{
	  ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
			disp);
	  return FALSE;
	}
    }

  if (TARGET_DEBUG_ADDR)
    fprintf (stderr, "Address is valid.\n");

  /* Everything looks valid, return true */
  return TRUE;
}
2716
2717/* Return a legitimate reference for ORIG (an address) using the
2718   register REG.  If REG is 0, a new pseudo is generated.
2719
2720   There are three types of references that must be handled:
2721
2722   1. Global data references must load the address from the GOT, via
2723      the PIC reg.  An insn is emitted to do this load, and the reg is
2724      returned.
2725
2726   2. Static data references must compute the address as an offset
2727      from the GOT, whose base is in the PIC reg.  An insn is emitted to
2728      compute the address into a reg, and the reg is returned.  Static
2729      data objects have SYMBOL_REF_FLAG set to differentiate them from
2730      global data objects.
2731
2732   3. Constant pool addresses must be handled special.  They are
2733      considered legitimate addresses, but only if not used with regs.
2734      When printed, the output routines know to print the reference with the
2735      PIC reg, even though the PIC reg doesn't appear in the RTL.
2736
2737   GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2738   reg also appears in the address (except for constant pool references,
2739   noted above).
2740
2741   "switch" statements also require special handling when generating
2742   PIC code.  See comments by the `casesi' insn in i386.md for details.  */
2743
/* Return a PIC-legitimate equivalent of ORIG, loading through the GOT
   or offsetting from it as needed (see the comment above for the three
   reference classes).  REG, when nonzero, is a register to reuse for
   intermediate results; otherwise a fresh pseudo is allocated.  Marks
   the function as using the PIC offset table when a symbolic reference
   is rewritten.  */
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant pool references are legitimate as-is; the output
	 routines add the PIC register when printing them.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
	reg = new = orig;
      else
	{
	  if (reg == 0)
	    reg = gen_reg_rtx (Pmode);

	  /* Local (static) data and labels: address is PIC reg plus a
	     GOT-relative offset.  Global data: load the address from
	     the GOT slot.  */
	  if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
	      || GET_CODE (addr) == LABEL_REF)
	    new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
	  else
	    new = gen_rtx_MEM (Pmode,
			   gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig));

	  emit_move_insn (reg, new);
	}
      current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) != PLUS)
	    abort ();
	}

      /* Already expressed relative to the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      /* Legitimize both halves recursively, then recombine.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg);

      if (GET_CODE (addr) == CONST_INT)
	return plus_constant (base, INTVAL (addr));

      /* Keep any constant term outermost.  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx (PLUS, Pmode, base, addr);
    }
  return new;
}
2808
2809/* Emit insns to move operands[1] into operands[0].  */
2810
2811void
2812emit_pic_move (operands, mode)
2813     rtx *operands;
2814     enum machine_mode mode ATTRIBUTE_UNUSED;
2815{
2816  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2817
2818  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2819    operands[1] = force_reg (SImode, operands[1]);
2820  else
2821    operands[1] = legitimize_pic_address (operands[1], temp);
2822}
2823
2824/* Try machine-dependent ways of modifying an illegitimate address
2825   to be legitimate.  If we find one, return the new, valid address.
2826   This macro is used in only one place: `memory_address' in explow.c.
2827
2828   OLDX is the address as it was before break_out_memory_refs was called.
2829   In some cases it is useful to look at this to decide what needs to be done.
2830
2831   MODE and WIN are passed so that this macro can use
2832   GO_IF_LEGITIMATE_ADDRESS.
2833
2834   It is always safe for this macro to do nothing.  It exists to recognize
2835   opportunities to optimize the output.
2836
2837   For the 80386, we handle X+REG by loading X into a register R and
2838   using R+REG.  R will go in a general reg and indexing will be used.
2839   However, if REG is a broken-out memory address or multiplication,
2840   nothing needs to be done because REG can certainly go in a general reg.
2841
2842   When -fpic is used, special handling is needed for symbolic references.
2843   See comments by legitimize_pic_address in i386.c for details.  */
2844
/* Rewrite X, an address for an access of MODE, into a (hopefully)
   legitimate form; see the comment above for the strategy.  OLDX is
   unused here.  Returns X unchanged if nothing helped.  */
rtx
legitimize_address (x, oldx, mode)
     register rtx x;
     register rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  int changed = 0;
  unsigned log;

  if (TARGET_DEBUG_ADDR)
    {
      fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
	       GET_MODE_NAME (mode));
      debug_rtx (x);
    }

  if (flag_pic && SYMBOLIC_CONST (x))
    return legitimize_pic_address (x, 0);

  /* Canonicalize shifts by 0, 1, 2, 3 into multiply (scale 1/2/4/8),
     which the SIB byte can encode.  */
  if (GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
    {
      changed = 1;
      x = gen_rtx (MULT, Pmode, force_reg (Pmode, XEXP (x, 0)),
		   GEN_INT (1 << log));
    }

  if (GET_CODE (x) == PLUS)
    {
      /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */

      if (GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 0) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
				 GEN_INT (1 << log));
	}

      if (GET_CODE (XEXP (x, 1)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
	{
	  changed = 1;
	  XEXP (x, 1) = gen_rtx (MULT, Pmode,
				 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
				 GEN_INT (1 << log));
	}

      /* Put multiply first if it isn't already.  legitimate_address_p
	 expects the MULT as the first PLUS operand.  */
      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx tmp = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = tmp;
	  changed = 1;
	}

      /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  This can be
	 created by virtual register instantiation, register elimination, and
	 similar optimizations.  */
      if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
	{
	  changed = 1;
	  x = gen_rtx (PLUS, Pmode,
		       gen_rtx (PLUS, Pmode, XEXP (x, 0),
				XEXP (XEXP (x, 1), 0)),
		       XEXP (XEXP (x, 1), 1));
	}

      /* Canonicalize
	 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  */
      else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
	       && CONSTANT_P (XEXP (x, 1)))
	{
	  rtx constant;
	  rtx other = NULL_RTX;

	  /* Fold whichever of the two constant terms is a CONST_INT into
	     the other.  */
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      constant = XEXP (x, 1);
	      other = XEXP (XEXP (XEXP (x, 0), 1), 1);
	    }
	  else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
	    {
	      constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
	      other = XEXP (x, 1);
	    }
	  else
	    constant = 0;

	  if (constant)
	    {
	      changed = 1;
	      x = gen_rtx (PLUS, Pmode,
			   gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
				    XEXP (XEXP (XEXP (x, 0), 1), 0)),
			   plus_constant (other, INTVAL (constant)));
	    }
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Remaining MULTs cannot be encoded: compute them into registers.  */
      if (GET_CODE (XEXP (x, 0)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
	}

      if (GET_CODE (XEXP (x, 1)) == MULT)
	{
	  changed = 1;
	  XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
	}

      if (changed
	  && GET_CODE (XEXP (x, 1)) == REG
	  && GET_CODE (XEXP (x, 0)) == REG)
	return x;

      if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
	{
	  changed = 1;
	  x = legitimize_pic_address (x, 0);
	}

      if (changed && legitimate_address_p (mode, x, FALSE))
	return x;

      /* Last resort: force the non-register operand into a register so
	 the result is reg+reg.  */
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val  = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 1) = temp;
	  return x;
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val  = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  XEXP (x, 0) = temp;
	  return x;
	}
    }

  return x;
}
3008
3009/* Print an integer constant expression in assembler syntax.  Addition
3010   and subtraction are the only arithmetic that may appear in these
3011   expressions.  FILE is the stdio stream to write to, X is the rtx, and
3012   CODE is the operand print code from the output string.  */
3013
3014static void
3015output_pic_addr_const (file, x, code)
3016     FILE *file;
3017     rtx x;
3018     int code;
3019{
3020  char buf[256];
3021
3022  switch (GET_CODE (x))
3023    {
3024    case PC:
3025      if (flag_pic)
3026	putc ('.', file);
3027      else
3028	abort ();
3029      break;
3030
3031    case SYMBOL_REF:
3032    case LABEL_REF:
3033      if (GET_CODE (x) == SYMBOL_REF)
3034	assemble_name (file, XSTR (x, 0));
3035      else
3036	{
3037	  ASM_GENERATE_INTERNAL_LABEL (buf, "L",
3038				       CODE_LABEL_NUMBER (XEXP (x, 0)));
3039	  assemble_name (asm_out_file, buf);
3040	}
3041
3042      if (code == 'X')
3043	; /* No suffix, dammit. */
3044      else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3045	fprintf (file, "@GOTOFF(%%ebx)");
3046      else if (code == 'P')
3047	fprintf (file, "@PLT");
3048      else if (GET_CODE (x) == LABEL_REF)
3049	fprintf (file, "@GOTOFF");
3050      else if (! SYMBOL_REF_FLAG (x))
3051	fprintf (file, "@GOT");
3052      else
3053	fprintf (file, "@GOTOFF");
3054
3055      break;
3056
3057    case CODE_LABEL:
3058      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3059      assemble_name (asm_out_file, buf);
3060      break;
3061
3062    case CONST_INT:
3063      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3064      break;
3065
3066    case CONST:
3067      /* This used to output parentheses around the expression,
3068	 but that does not work on the 386 (either ATT or BSD assembler).  */
3069      output_pic_addr_const (file, XEXP (x, 0), code);
3070      break;
3071
3072    case CONST_DOUBLE:
3073      if (GET_MODE (x) == VOIDmode)
3074	{
3075	  /* We can use %d if the number is <32 bits and positive.  */
3076	  if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
3077	    fprintf (file, "0x%lx%08lx",
3078		     (unsigned long) CONST_DOUBLE_HIGH (x),
3079		     (unsigned long) CONST_DOUBLE_LOW (x));
3080	  else
3081	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3082	}
3083      else
3084	/* We can't handle floating point constants;
3085	   PRINT_OPERAND must handle them.  */
3086	output_operand_lossage ("floating constant misused");
3087      break;
3088
3089    case PLUS:
3090      /* Some assemblers need integer constants to appear first.  */
3091      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3092	{
3093	  output_pic_addr_const (file, XEXP (x, 0), code);
3094	  if (INTVAL (XEXP (x, 1)) >= 0)
3095	    fprintf (file, "+");
3096	  output_pic_addr_const (file, XEXP (x, 1), code);
3097	}
3098      else
3099	{
3100	  output_pic_addr_const (file, XEXP (x, 1), code);
3101	  if (INTVAL (XEXP (x, 0)) >= 0)
3102	    fprintf (file, "+");
3103	  output_pic_addr_const (file, XEXP (x, 0), code);
3104	}
3105      break;
3106
3107    case MINUS:
3108      output_pic_addr_const (file, XEXP (x, 0), code);
3109      fprintf (file, "-");
3110      output_pic_addr_const (file, XEXP (x, 1), code);
3111      break;
3112
3113    default:
3114      output_operand_lossage ("invalid expression as operand");
3115    }
3116}
3117
3118/* Append the correct conditional move suffix which corresponds to CODE.  */
3119
/* Write to FILE the condition-code suffix (e.g. "e", "ne", "b") that
   tests CODE for a comparison whose result class is MODE (MODE_INT or
   MODE_FLOAT).  When REVERSE_CC is set the opposite condition is
   emitted.  Consults cc_prev_status for where the flags actually
   live (80387, carry-encoded zero, etc.).  */
static void
put_condition_code (code, reverse_cc, mode, file)
     enum rtx_code code;
     int  reverse_cc;
     enum mode_class mode;
     FILE * file;
{
  /* Under -mieee-fp with the flags still in the 80387 (and no fcomi),
     the comparison sequence already accounts for ordering, so the
     condition is not reversed here; the ieee arms below encode the
     adjusted suffixes instead.  */
  int ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
	      && ! (cc_prev_status.flags & CC_FCOMI));
  if (reverse_cc && ! ieee)
    code = reverse_condition (code);

  if (mode == MODE_INT)
    switch (code)
      {
      case NE:
	/* CC_Z_IN_NOT_C: the zero result is encoded in the (inverted)
	   carry flag, so test carry instead of ZF.  */
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("b", file);
	else
	  fputs ("ne", file);
	return;

      case EQ:
	if (cc_prev_status.flags & CC_Z_IN_NOT_C)
	  fputs ("ae", file);
	else
	  fputs ("e", file);
	return;

      case GE:
	/* CC_NO_OVERFLOW: OF is known clear, so the sign flag alone
	   decides signed comparisons.  */
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("ns", file);
	else
	  fputs ("ge", file);
	return;

      case GT:
	fputs ("g", file);
	return;

      case LE:
	fputs ("le", file);
	return;

      case LT:
	if (cc_prev_status.flags & CC_NO_OVERFLOW)
	  fputs ("s", file);
	else
	  fputs ("l", file);
	return;

      case GEU:
	fputs ("ae", file);
	return;

      case GTU:
	fputs ("a", file);
	return;

      case LEU:
	fputs ("be", file);
	return;

      case LTU:
	fputs ("b", file);
	return;

      default:
	output_operand_lossage ("Invalid %%C operand");
      }

  else if (mode == MODE_FLOAT)
    /* FP comparisons end up in the integer flags via fnstsw/sahf (or
       fcomi), so only the unsigned-style suffixes apply.  */
    switch (code)
      {
      case NE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file);
	return;
      case EQ:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file);
	return;
      case GE:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LE:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LT:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      case GEU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
	return;
      case GTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
	return;
      case LEU:
	fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
	return;
      case LTU:
	fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
	return;
      default:
	output_operand_lossage ("Invalid %%C operand");
    }
  /* NOTE(review): a MODE other than MODE_INT/MODE_FLOAT falls through
     silently, emitting no suffix -- presumably unreachable; confirm.  */
}
3228
3229/* Meaning of CODE:
3230   L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3231   C -- print opcode suffix for set/cmov insn.
3232   c -- like C, but print reversed condition
3233   F -- print opcode suffix for fcmov insn.
   f -- like F, but print reversed condition
3235   R -- print the prefix for register names.
3236   z -- print the opcode suffix for the size of the current operand.
3237   * -- print a star (in certain assembler syntax)
3238   w -- print the operand as if it's a "word" (HImode) even if it isn't.
3239   c -- don't print special prefixes before constant operands.
3240   J -- print the appropriate jump operand.
3241   s -- print a shift double count, followed by the assemblers argument
3242	delimiter.
3243   b -- print the QImode name of the register for the indicated operand.
3244	%b0 would print %al if operands[0] is reg 0.
3245   w --  likewise, print the HImode name of the register.
3246   k --  likewise, print the SImode name of the register.
3247   h --  print the QImode name for a "high" register, either ah, bh, ch or dh.
3248   y --  print "st(0)" instead of "st" as a register.
3249   P --  print as a PIC constant */
3250
void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  /* First handle the single-letter operand modifiers listed in the
     comment above; most of them print something and return without
     looking at X further.  */
  if (code)
    {
      switch (code)
	{
	case '*':
	  /* Star prefix (indirect call/jump) in assemblers that use it.  */
	  if (USE_STAR)
	    putc ('*', file);
	  return;

	case 'L':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'W':
	  PUT_OP_SIZE (code, 'w', file);
	  return;

	case 'B':
	  PUT_OP_SIZE (code, 'b', file);
	  return;

	case 'Q':
	  PUT_OP_SIZE (code, 'l', file);
	  return;

	case 'S':
	  PUT_OP_SIZE (code, 's', file);
	  return;

	case 'T':
	  PUT_OP_SIZE (code, 't', file);
	  return;

	case 'z':
	  /* 387 opcodes don't get size suffixes if the operands are
	     registers. */

	  if (STACK_REG_P (x))
	    return;

	  /* this is the size of op from size of operand */
	  switch (GET_MODE_SIZE (GET_MODE (x)))
	    {
	    case 1:
	      PUT_OP_SIZE ('B', 'b', file);
	      return;

	    case 2:
	      PUT_OP_SIZE ('W', 'w', file);
	      return;

	    case 4:
	      if (GET_MODE (x) == SFmode)
		{
		  PUT_OP_SIZE ('S', 's', file);
		  return;
		}
	      else
		PUT_OP_SIZE ('L', 'l', file);
	      return;

	    case 12:
		  PUT_OP_SIZE ('T', 't', file);
		  return;

	    case 8:
	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
		{
#ifdef GAS_MNEMONICS
		  PUT_OP_SIZE ('Q', 'q', file);
		  return;
#else
		  /* Non-GAS assemblers spell the 64-bit integer suffix
		     "ll" (e.g. fildll), so emit 'l' here and fall
		     through to emit the second 'l' below.  */
		  PUT_OP_SIZE ('Q', 'l', file);	/* Fall through */
#endif
		}

	      PUT_OP_SIZE ('Q', 'l', file);
	      return;
	    }
	  /* NOTE(review): an unhandled mode size falls through to the
	     no-op cases below and X is then printed as a plain
	     operand -- presumably unreachable; confirm.  */

	case 'b':
	case 'w':
	case 'k':
	case 'h':
	case 'y':
	case 'P':
	case 'X':
	  /* These modifiers are handled by PRINT_REG / the constant
	     printing code at the bottom of this function.  */
	  break;

	case 'J':
	  switch (GET_CODE (x))
	    {
	      /* These conditions are appropriate for testing the result
		 of an arithmetic operation, not for a compare operation.
	         Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
		 CC_Z_IN_NOT_C false and not floating point.  */
	    case NE:  fputs ("jne", file); return;
	    case EQ:  fputs ("je",  file); return;
	    case GE:  fputs ("jns", file); return;
	    case LT:  fputs ("js",  file); return;
	      /* An unsigned result is always >= 0 and never < 0, hence
		 the unconditional jump / never-taken branch below.  */
	    case GEU: fputs ("jmp", file); return;
	    case GTU: fputs ("jne",  file); return;
	    case LEU: fputs ("je", file); return;
	    case LTU: fputs ("#branch never",  file); return;

	    /* no matching branches for GT nor LE */

	    default:
	      abort ();
	    }

	case 's':
	  /* Shift-double count followed by the assembler's argument
	     delimiter (a comma, via AS2C).  */
	  if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
	    {
	      PRINT_OPERAND (file, x, 0);
	      fputs (AS2C (,) + 1, file);
	    }

	  return;

	  /* This is used by the conditional move instructions.  */
	case 'C':
	  put_condition_code (GET_CODE (x), 0, MODE_INT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'c':
	  put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;

	case 'F':
	  put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
	  return;

	  /* Like above, but reverse condition */
	case 'f':
	  put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
	  return;

	default:
	  {
	    char str[50];

	    sprintf (str, "invalid operand code `%c'", code);
	    output_operand_lossage (str);
	  }
	}
    }

  /* No modifier (or a pass-through one): print the operand itself.  */
  if (GET_CODE (x) == REG)
    {
      PRINT_REG (x, code, file);
    }

  else if (GET_CODE (x) == MEM)
    {
      PRINT_PTR (x, file);
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
	{
	  if (flag_pic)
	    output_pic_addr_const (file, XEXP (x, 0), code);
	  else
	    output_addr_const (file, XEXP (x, 0));
	}
      else
	output_address (XEXP (x, 0));
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* SFmode constant: print the target bit pattern as hex.  */
      REAL_VALUE_TYPE r;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      PRINT_IMMED_PREFIX (file);
      fprintf (file, "0x%lx", l);
    }

 /* These float cases don't actually occur as immediate operands. */
 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }

  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
    {
      REAL_VALUE_TYPE r;
      char dstr[30];

      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
      fprintf (file, "%s", dstr);
    }
  else
    {
      /* Some other constant: 'P' suppresses the immediate/offset
	 prefix so it can be printed as a bare PIC constant.  */
      if (code != 'P')
	{
	  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	    PRINT_IMMED_PREFIX (file);
	  else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
		   || GET_CODE (x) == LABEL_REF)
	    PRINT_OFFSET_PREFIX (file);
	}
      if (flag_pic)
	output_pic_addr_const (file, x, code);
      else
	output_addr_const (file, x);
    }
}
3471
3472/* Print a memory operand whose address is ADDR.  */
3473
void
print_operand_address (file, addr)
     FILE *file;
     register rtx addr;
{
  register rtx reg1, reg2, breg, ireg;
  rtx offset;

  switch (GET_CODE (addr))
    {
    case REG:
      /* A bare register: print it as "%e<name>".  */
      ADDR_BEG (file);
      fprintf (file, "%se", RP);
      fputs (hi_reg_name[REGNO (addr)], file);
      ADDR_END (file);
      break;

    case PLUS:
      reg1 = 0;
      reg2 = 0;
      ireg = 0;
      breg = 0;
      offset = 0;
      /* Peel off a constant displacement, if any.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}

      /* If what remains is itself a PLUS, peel one register or scaled
	 (MULT) index term into reg1, leaving the other in addr.  */
      if (GET_CODE (addr) != PLUS)
	;
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 0)) == REG)
	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == REG)
	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);

      if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
	{
	  if (reg1 == 0)
	    reg1 = addr;
	  else
	    reg2 = addr;

	  addr = 0;
	}

      /* Whatever is left in addr must be the displacement.  */
      if (offset != 0)
	{
	  if (addr != 0)
	    abort ();
	  addr = offset;
	}

      /* Classify the two terms as base vs. index: a MULT can only be
	 an index; otherwise a register valid as a base becomes breg.  */
      if ((reg1 && GET_CODE (reg1) == MULT)
	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
	{
	  breg = reg2;
	  ireg = reg1;
	}
      else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
	{
	  breg = reg1;
	  ireg = reg2;
	}

      if (ireg != 0 || breg != 0)
	{
	  int scale = 1;

	  /* Print the displacement part first.  */
	  if (addr != 0)
	    {
	      if (flag_pic)
		output_pic_addr_const (file, addr, 0);
	      else if (GET_CODE (addr) == LABEL_REF)
		output_asm_label (addr);
	      else
		output_addr_const (file, addr);
	    }

  	  if (ireg != 0 && GET_CODE (ireg) == MULT)
	    {
	      scale = INTVAL (XEXP (ireg, 1));
	      ireg = XEXP (ireg, 0);
	    }

	  /* The stack pointer can only appear as a base register,
	     never an index register, so exchange the regs if it is wrong. */

	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
	    {
	      rtx tmp;

	      tmp = breg;
	      breg = ireg;
	      ireg = tmp;
	    }

	  /* output breg+ireg*scale */
	  PRINT_B_I_S (breg, ireg, scale, file);
	  break;
	}
      /* NOTE(review): if no base/index register was recognized,
	 control falls through into the MULT case -- presumably a
	 legitimate PLUS address always contains a register, making
	 this path unreachable; confirm before relying on it.  */

    case MULT:
      {
	int scale;

	/* A bare scaled index; the constant may be either operand.  */
	if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	  {
	    scale = INTVAL (XEXP (addr, 0));
	    ireg = XEXP (addr, 1);
	  }
	else
	  {
	    scale = INTVAL (XEXP (addr, 1));
	    ireg = XEXP (addr, 0);
	  }

	/* Print an explicit zero displacement before the index part.  */
	output_addr_const (file, const0_rtx);
	PRINT_B_I_S (NULL_RTX, ireg, scale, file);
      }
      break;

    default:
      /* Absolute address: small integers print directly; anything
	 else goes through the constant printers.  */
      if (GET_CODE (addr) == CONST_INT
	  && INTVAL (addr) < 0x8000
	  && INTVAL (addr) >= -0x8000)
	fprintf (file, "%d", (int) INTVAL (addr));
      else
	{
	  if (flag_pic)
	    output_pic_addr_const (file, addr, 0);
	  else
	    output_addr_const (file, addr);
	}
    }
}
3619
3620/* Set the cc_status for the results of an insn whose pattern is EXP.
3621   On the 80386, we assume that only test and compare insns, as well
3622   as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
3623   ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3624   Also, we assume that jumps, moves and sCOND don't affect the condition
3625   codes.  All else clobbers the condition codes, by assumption.
3626
3627   We assume that ALL integer add, minus, etc. instructions effect the
3628   condition codes.  This MUST be consistent with i386.md.
3629
3630   We don't record any float test or compare - the redundant test &
3631   compare check in final.c does not handle stack-like regs correctly. */
3632
void
notice_update_cc (exp)
     rtx exp;
{
  if (GET_CODE (exp) == SET)
    {
      /* Jumps do not alter the cc's.  */
      if (SET_DEST (exp) == pc_rtx)
	return;

      /* Moving register or memory into a register:
	 it doesn't alter the cc's, but it might invalidate
	 the RTX's which we remember the cc's came from.
	 (Note that moving a constant 0 or 1 MAY set the cc's).  */
      if (REG_P (SET_DEST (exp))
	  && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'
	      || (GET_CODE (SET_SRC (exp)) == IF_THEN_ELSE
		  && GET_MODE_CLASS (GET_MODE (SET_DEST (exp))) == MODE_INT)))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;

	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Moving register into memory doesn't alter the cc's.
	 It may invalidate the RTX's which we remember the cc's came from.  */
      if (GET_CODE (SET_DEST (exp)) == MEM
	  && (REG_P (SET_SRC (exp))
	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
	{
	  if (cc_status.value1
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
	    cc_status.value1 = 0;
	  if (cc_status.value2
	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
	    cc_status.value2 = 0;

	  return;
	}

      /* Function calls clobber the cc's.  */
      else if (GET_CODE (SET_SRC (exp)) == CALL)
	{
	  CC_STATUS_INIT;
	  return;
	}

      /* Tests and compares set the cc's in predictable ways.  */
      else if (SET_DEST (exp) == cc0_rtx)
	{
	  CC_STATUS_INIT;
	  cc_status.value1 = SET_SRC (exp);
	  return;
	}

      /* Certain instructions effect the condition codes. */
      else if (GET_MODE (SET_SRC (exp)) == SImode
	       || GET_MODE (SET_SRC (exp)) == HImode
	       || GET_MODE (SET_SRC (exp)) == QImode)
	switch (GET_CODE (SET_SRC (exp)))
	  {
	  case ASHIFTRT: case LSHIFTRT: case ASHIFT:
	    /* Shifts on the 386 don't set the condition codes if the
	       shift count is zero. */
	    if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
	      {
		CC_STATUS_INIT;
		break;
	      }

	    /* We assume that the CONST_INT is non-zero (this rtx would
	       have been deleted if it were zero. */

	    /* FALLTHRU: a constant-count shift sets the flags like the
	       arithmetic operations below.  */
	  case PLUS: case MINUS: case NEG:
	  case AND: case IOR: case XOR:
	    cc_status.flags = CC_NO_OVERFLOW;
	    cc_status.value1 = SET_SRC (exp);
	    cc_status.value2 = SET_DEST (exp);
	    break;

	    /* This is the bsf pattern used by ffs.  */
	  case UNSPEC:
	    if (XINT (SET_SRC (exp), 1) == 5)
	      {
		/* Only the Z flag is defined after bsf.  */
		cc_status.flags
		  = CC_NOT_POSITIVE | CC_NOT_NEGATIVE | CC_NO_OVERFLOW;
		cc_status.value1 = XVECEXP (SET_SRC (exp), 0, 0);
		cc_status.value2 = 0;
		break;
	      }
	    /* FALLTHRU */

	  default:
	    CC_STATUS_INIT;
	  }
      else
	{
	  CC_STATUS_INIT;
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      /* A PARALLEL whose first element is a SET: same rules applied to
	 that SET (typically a compare plus clobbers).  */
      if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
	return;
      if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)

	{
	  CC_STATUS_INIT;
          if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
	    {
              cc_status.flags |= CC_IN_80387;
	      /* NOTE(review): the FCOMI marking is disabled here (the
		 `0 &&'), matching the disabled path in
		 output_float_compare.  */
	      if (0 && TARGET_CMOVE && stack_regs_mentioned_p
		  (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
		cc_status.flags |= CC_FCOMI;
	    }
	  else
	    cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
	  return;
	}

      CC_STATUS_INIT;
    }
  else
    {
      CC_STATUS_INIT;
    }
}
3769
3770/* Split one or more DImode RTL references into pairs of SImode
3771   references.  The RTL can be REG, offsettable MEM, integer constant, or
3772   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
3773   split and "num" is its length.  lo_half and hi_half are output arrays
3774   that parallel "operands". */
3775
3776void
3777split_di (operands, num, lo_half, hi_half)
3778     rtx operands[];
3779     int num;
3780     rtx lo_half[], hi_half[];
3781{
3782  while (num--)
3783    {
3784      rtx op = operands[num];
3785      if (GET_CODE (op) == REG)
3786	{
3787	  lo_half[num] = gen_rtx_REG (SImode, REGNO (op));
3788	  hi_half[num] = gen_rtx_REG (SImode, REGNO (op) + 1);
3789	}
3790      else if (CONSTANT_P (op))
3791	split_double (op, &lo_half[num], &hi_half[num]);
3792      else if (offsettable_memref_p (op))
3793	{
3794	  rtx lo_addr = XEXP (op, 0);
3795	  rtx hi_addr = XEXP (adj_offsettable_operand (op, 4), 0);
3796	  lo_half[num] = change_address (op, SImode, lo_addr);
3797	  hi_half[num] = change_address (op, SImode, hi_addr);
3798	}
3799      else
3800	abort();
3801    }
3802}
3803
3804/* Return 1 if this is a valid binary operation on a 387.
3805   OP is the expression matched, and MODE is its mode. */
3806
3807int
3808binary_387_op (op, mode)
3809    register rtx op;
3810    enum machine_mode mode;
3811{
3812  if (mode != VOIDmode && mode != GET_MODE (op))
3813    return 0;
3814
3815  switch (GET_CODE (op))
3816    {
3817    case PLUS:
3818    case MINUS:
3819    case MULT:
3820    case DIV:
3821      return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3822
3823    default:
3824      return 0;
3825    }
3826}
3827
3828/* Return 1 if this is a valid shift or rotate operation on a 386.
3829   OP is the expression matched, and MODE is its mode. */
3830
3831int
3832shift_op (op, mode)
3833    register rtx op;
3834    enum machine_mode mode;
3835{
3836  rtx operand = XEXP (op, 0);
3837
3838  if (mode != VOIDmode && mode != GET_MODE (op))
3839    return 0;
3840
3841  if (GET_MODE (operand) != GET_MODE (op)
3842      || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3843    return 0;
3844
3845  return (GET_CODE (op) == ASHIFT
3846	  || GET_CODE (op) == ASHIFTRT
3847	  || GET_CODE (op) == LSHIFTRT
3848	  || GET_CODE (op) == ROTATE
3849	  || GET_CODE (op) == ROTATERT);
3850}
3851
3852/* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3853   MODE is not used.  */
3854
3855int
3856VOIDmode_compare_op (op, mode)
3857    register rtx op;
3858    enum machine_mode mode ATTRIBUTE_UNUSED;
3859{
3860  return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3861}
3862
3863/* Output code to perform a 387 binary operation in INSN, one of PLUS,
3864   MINUS, MULT or DIV.  OPERANDS are the insn operands, where operands[3]
3865   is the expression of the binary operation.  The output may either be
3866   emitted here, or returned to the caller, like all output_* functions.
3867
3868   There is no guarantee that the operands are the same mode, as they
3869   might be within FLOAT or FLOAT_EXTEND expressions. */
3870
char *
output_387_binary_op (insn, operands)
     rtx insn;
     rtx *operands;
{
  rtx temp;
  char *base_op;
  static char buf[100];

  /* Choose the base mnemonic; the "fi" forms take an integer
     (memory) operand.  */
  switch (GET_CODE (operands[3]))
    {
    case PLUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fiadd";
      else
	base_op = "fadd";
      break;

    case MINUS:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fisub";
      else
	base_op = "fsub";
      break;

    case MULT:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fimul";
      else
	base_op = "fmul";
      break;

    case DIV:
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
	base_op = "fidiv";
      else
	base_op = "fdiv";
      break;

    default:
      abort ();
    }

  strcpy (buf, base_op);

  /* Now pick the operand form, appending it to buf.  */
  switch (GET_CODE (operands[3]))
    {
    case MULT:
    case PLUS:
      /* Commutative: swap so that an input matching the destination
	 register ends up as operands[1].  */
      if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
	{
	  temp = operands[2];
	  operands[2] = operands[1];
	  operands[1] = temp;
	}

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      /* An operand living in a CPU register must be spilled first;
	 output_op_from_reg emits the whole sequence itself.  */
      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      /* Use the popping ("p") form when the source stack reg dies.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (p,%2,%0));
	}

      if (STACK_TOP_P (operands[0]))
	return strcat (buf, AS2C (%y2,%0));
      else
	return strcat (buf, AS2C (%2,%0));

    case MINUS:
    case DIV:
      /* Non-commutative: the reversed ("r") forms handle the case
	 where the first input is the memory/non-top operand.  */
      if (GET_CODE (operands[1]) == MEM)
	return strcat (buf, AS1 (r%z1,%1));

      if (GET_CODE (operands[2]) == MEM)
	return strcat (buf, AS1 (%z2,%2));

      if (NON_STACK_REG_P (operands[1]))
	{
	  output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
	  return "";
	}

      else if (NON_STACK_REG_P (operands[2]))
	{
	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
	  return "";
	}

      if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
	abort ();

      /* Popping forms when one of the stack inputs dies.  */
      if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (p,%0,%2));
	  else
	    return strcat (buf, AS2 (rp,%2,%0));
	}

      if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
	{
	  if (STACK_TOP_P (operands[0]))
	    return strcat (buf, AS2 (rp,%0,%1));
	  else
	    return strcat (buf, AS2 (p,%1,%0));
	}

      if (STACK_TOP_P (operands[0]))
	{
	  if (STACK_TOP_P (operands[1]))
	    return strcat (buf, AS2C (%y2,%0));
	  else
	    return strcat (buf, AS2 (r,%y1,%0));
	}
      else if (STACK_TOP_P (operands[1]))
	return strcat (buf, AS2C (%1,%0));
      else
	return strcat (buf, AS2 (r,%2,%0));

    default:
      abort ();
    }
}
4014
4015/* Output code for INSN to convert a float to a signed int.  OPERANDS
4016   are the insn operands.  The output may be SFmode or DFmode and the
4017   input operand may be SImode or DImode.  As a special case, make sure
4018   that the 387 stack top dies if the output mode is DImode, because the
4019   hardware requires this.  */
4020
4021char *
4022output_fix_trunc (insn, operands)
4023     rtx insn;
4024     rtx *operands;
4025{
4026  int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
4027  rtx xops[2];
4028
4029  if (! STACK_TOP_P (operands[1]))
4030    abort ();
4031
4032  xops[0] = GEN_INT (12);
4033  xops[1] = operands[4];
4034
4035  output_asm_insn (AS1 (fnstc%W2,%2), operands);
4036  output_asm_insn (AS2 (mov%L2,%2,%4), operands);
4037  output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
4038  output_asm_insn (AS2 (mov%L4,%4,%3), operands);
4039  output_asm_insn (AS1 (fldc%W3,%3), operands);
4040
4041  if (NON_STACK_REG_P (operands[0]))
4042    output_to_reg (operands[0], stack_top_dies, operands[3]);
4043
4044  else if (GET_CODE (operands[0]) == MEM)
4045    {
4046      if (stack_top_dies)
4047	output_asm_insn (AS1 (fistp%z0,%0), operands);
4048      else if (GET_MODE (operands[0]) == DImode && ! stack_top_dies)
4049	{
4050	  /* There is no DImode version of this without a stack pop, so
4051	     we must emulate it.  It doesn't matter much what the second
4052	     instruction is, because the value being pushed on the FP stack
4053	     is not used except for the following stack popping store.
4054	     This case can only happen without optimization, so it doesn't
4055	     matter that it is inefficient.  */
4056	  output_asm_insn (AS1 (fistp%z0,%0), operands);
4057	  output_asm_insn (AS1 (fild%z0,%0), operands);
4058	}
4059      else
4060	output_asm_insn (AS1 (fist%z0,%0), operands);
4061    }
4062  else
4063    abort ();
4064
4065  return AS1 (fldc%W2,%2);
4066}
4067
4068/* Output code for INSN to compare OPERANDS.  The two operands might
4069   not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
4070   expression.  If the compare is in mode CCFPEQmode, use an opcode that
4071   will not fault if a qNaN is present. */
4072
char *
output_float_compare (insn, operands)
     rtx insn;
     rtx *operands;
{
  int stack_top_dies;
  rtx body = XVECEXP (PATTERN (insn), 0, 0);
  /* CCFPEQmode marks a compare that must not fault on a qNaN.  */
  int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
  rtx tmp;

  /* NOTE(review): the FCOMI path is disabled here (the `0 &&'),
     matching the disabled marking in notice_update_cc.  */
  if (0 && TARGET_CMOVE && STACK_REG_P (operands[1]))
    {
      cc_status.flags |= CC_FCOMI;
      cc_prev_status.flags &= ~CC_TEST_AX;
    }

  /* The 387 compares against the stack top; swap the operands if
     needed and record that the comparison sense is reversed.  */
  if (! STACK_TOP_P (operands[0]))
    {
      tmp = operands[0];
      operands[0] = operands[1];
      operands[1] = tmp;
      cc_status.flags |= CC_REVERSED;
    }

  if (! STACK_TOP_P (operands[0]))
    abort ();

  stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;

  if (STACK_REG_P (operands[1])
      && stack_top_dies
      && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
      && REGNO (operands[1]) != FIRST_STACK_REG)
    {
      /* If both the top of the 387 stack dies, and the other operand
	 is also a stack register that dies, then this must be a
	 `fcompp' float compare */

      if (unordered_compare)
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      output_asm_insn (AS2 (fucomip,%y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fucompp", operands);
	}
      else
	{
	  if (cc_status.flags & CC_FCOMI)
	    {
	      output_asm_insn (AS2 (fcomip, %y1,%0), operands);
	      output_asm_insn (AS1 (fstp, %y0), operands);
	      return "";
	    }
	  else
	    output_asm_insn ("fcompp", operands);
	}
    }
  else
    {
      static char buf[100];

      /* Decide if this is the integer or float compare opcode, or the
	 unordered float compare. */

      if (unordered_compare)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
      else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
      else
	strcpy (buf, "ficom");

      /* Modify the opcode if the 387 stack is to be popped. */

      if (stack_top_dies)
	strcat (buf, "p");

      if (NON_STACK_REG_P (operands[1]))
	output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
      else if (cc_status.flags & CC_FCOMI)
	{
	  output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
	  return "";
	}
      else
        output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
    }

  /* Now retrieve the condition code. */

  return output_fp_cc0_set (insn);
}
4168
4169/* Output opcodes to transfer the results of FP compare or test INSN
4170   from the FPU to the CPU flags.  If TARGET_IEEE_FP, ensure that if the
4171   result of the compare or test is unordered, no comparison operator
4172   succeeds except NE.  Return an output template, if any.  */
4173
char *
output_fp_cc0_set (insn)
     rtx insn;
{
  rtx xops[3];
  rtx next;
  enum rtx_code code;

  /* Store the 387 status word into %ax (hard register 0).  */
  xops[0] = gen_rtx_REG (HImode, 0);
  output_asm_insn (AS1 (fnsts%W0,%0), xops);

  if (! TARGET_IEEE_FP)
    {
      if (!(cc_status.flags & CC_REVERSED))
        {
          next = next_cc0_user (insn);

	  /* Find the comparison code the cc0 user will test.  */
          if (GET_CODE (next) == JUMP_INSN
              && GET_CODE (PATTERN (next)) == SET
              && SET_DEST (PATTERN (next)) == pc_rtx
              && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
          else if (GET_CODE (PATTERN (next)) == SET)
	    code = GET_CODE (SET_SRC (PATTERN (next)));
          else
	    return "sahf";

	  if (code == GT || code == LT || code == EQ || code == NE
	      || code == LE || code == GE)
	    {
	      /* We will test eax directly. */
              cc_status.flags |= CC_TEST_AX;
	      return "";
            }
        }

      /* Otherwise just move AH into the CPU flags.  */
      return "sahf";
    }

  /* TARGET_IEEE_FP: test the status word bits explicitly so that an
     unordered result satisfies no comparison except NE.  */
  next = next_cc0_user (insn);
  if (next == NULL_RTX)
    abort ();

  /* Dig the comparison code out of the cc0-using insn.  */
  if (GET_CODE (next) == JUMP_INSN
      && GET_CODE (PATTERN (next)) == SET
      && SET_DEST (PATTERN (next)) == pc_rtx
      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
    code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
  else if (GET_CODE (PATTERN (next)) == SET)
    {
      if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
      else
	code = GET_CODE (SET_SRC (PATTERN (next)));
    }

  else if (GET_CODE (PATTERN (next)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
	code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
      else
	code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
    }
  else
    abort ();

  xops[0] = gen_rtx_REG (QImode, 0);

  /* After the fnstsw above, %ah (written as %h0) holds the 387
     condition bits: C0 = 0x01, C2 = 0x04, C3 = 0x40 (per the 80387
     status word layout).  0x45 masks all three; 0x44 masks C2|C3;
     0x05 masks C0|C2.  */
  switch (code)
    {
    case GT:
      xops[1] = GEN_INT (0x45);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LT:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x01);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case GE:
      xops[1] = GEN_INT (0x05);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      /* je label */
      break;

    case LE:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS1 (dec%B0,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* jb label */
      break;

    case EQ:
      xops[1] = GEN_INT (0x45);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
      /* je label */
      break;

    case NE:
      xops[1] = GEN_INT (0x44);
      xops[2] = GEN_INT (0x40);
      output_asm_insn (AS2 (and%B0,%1,%h0), xops);
      output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
      /* jne label */
      break;

    case GTU:
    case LTU:
    case GEU:
    case LEU:
    default:
      abort ();
    }

  return "";
}
4300
#define MAX_386_STACK_LOCALS 2

/* Per-function cache of stack slots, indexed by machine mode and slot
   number; filled lazily by assign_386_stack_local and cleared by
   clear_386_stack_locals.  */
static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];

/* Define the structure for the machine field in struct function.
   This holds the i386-specific per-function state that must be saved
   and restored around nested-function compilation.  */
struct machine_function
{
  /* Saved copy of the file-scope i386_stack_locals cache above.  */
  rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
  /* Saved PIC label rtx and its assembler name.  The 256-byte size here
     must match the bcopy lengths in save/restore_386_machine_status.  */
  rtx pic_label_rtx;
  char pic_label_name[256];
};
4312
4313/* Functions to save and restore i386_stack_locals.
4314   These will be called, via pointer variables,
4315   from push_function_context and pop_function_context.  */
4316
/* Save the i386-specific per-function state (stack-slot cache and PIC
   label) into P->machine before a nested function is compiled.
   Installed as the save_machine_status hook by clear_386_stack_locals.  */
void
save_386_machine_status (p)
     struct function *p;
{
  /* NOTE(review): xmalloc presumably aborts on allocation failure
     (usual GCC convention), so the result is used unchecked.  */
  p->machine
    = (struct machine_function *) xmalloc (sizeof (struct machine_function));
  bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
	 sizeof i386_stack_locals);
  p->machine->pic_label_rtx = pic_label_rtx;
  /* 256 is the declared size of machine_function.pic_label_name.  */
  bcopy (pic_label_name, p->machine->pic_label_name, 256);
}
4328
/* Restore the i386-specific per-function state previously captured by
   save_386_machine_status, then release the saved copy.  Installed as
   the restore_machine_status hook by clear_386_stack_locals.  */
void
restore_386_machine_status (p)
     struct function *p;
{
  bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
	 sizeof i386_stack_locals);
  pic_label_rtx = p->machine->pic_label_rtx;
  bcopy (p->machine->pic_label_name, pic_label_name, 256);
  /* The save hook allocated this with xmalloc; we own it now.  */
  free (p->machine);
  p->machine = NULL;
}
4340
4341/* Clear stack slot assignments remembered from previous functions.
4342   This is called from INIT_EXPANDERS once before RTL is emitted for each
4343   function.  */
4344
4345void
4346clear_386_stack_locals ()
4347{
4348  enum machine_mode mode;
4349  int n;
4350
4351  for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4352       mode = (enum machine_mode) ((int) mode + 1))
4353    for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4354      i386_stack_locals[(int) mode][n] = NULL_RTX;
4355
4356  pic_label_rtx = NULL_RTX;
4357  bzero (pic_label_name, 256);
4358  /* Arrange to save and restore i386_stack_locals around nested functions.  */
4359  save_machine_status = save_386_machine_status;
4360  restore_machine_status = restore_386_machine_status;
4361}
4362
4363/* Return a MEM corresponding to a stack slot with mode MODE.
4364   Allocate a new slot if necessary.
4365
4366   The RTL for a function can have several slots available: N is
4367   which slot to use.  */
4368
4369rtx
4370assign_386_stack_local (mode, n)
4371     enum machine_mode mode;
4372     int n;
4373{
4374  if (n < 0 || n >= MAX_386_STACK_LOCALS)
4375    abort ();
4376
4377  if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4378    i386_stack_locals[(int) mode][n]
4379      = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4380
4381  return i386_stack_locals[(int) mode][n];
4382}
4383
4384int is_mul(op,mode)
4385    register rtx op;
4386    enum machine_mode mode ATTRIBUTE_UNUSED;
4387{
4388  return (GET_CODE (op) == MULT);
4389}
4390
4391int is_div(op,mode)
4392    register rtx op;
4393    enum machine_mode mode ATTRIBUTE_UNUSED;
4394{
4395  return (GET_CODE (op) == DIV);
4396}
4397
4398#ifdef NOTYET
4399/* Create a new copy of an rtx.
4400   Recursively copies the operands of the rtx,
4401   except for those few rtx codes that are sharable.
4402   Doesn't share CONST  */
4403
rtx
copy_all_rtx (orig)
     register rtx orig;
{
  register rtx copy;
  register int i, j;
  register RTX_CODE code;
  register char *format_ptr;

  code = GET_CODE (orig);

  /* Codes whose objects represent unique values are shared, not
     copied; return them unchanged.  */
  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values. */
      return orig;

#if 0
    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;
#endif
      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */
    }

  /* Allocate a fresh rtx and duplicate the mode and status bits.  */
  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;
  copy->integrated = orig->integrated;
  /* intel1 */
  copy->is_spill_rtx = orig->is_spill_rtx;

  /* Walk the operands according to the rtx format string and copy each
     one as its format letter dictates.
     NOTE(review): the 'e' and 'E' cases recurse via copy_rtx, not
     copy_all_rtx, so nested CONSTs may still end up shared despite the
     function comment -- confirm whether that is intentional.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Expression operand: deep-copy unless null.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_rtx (XEXP (orig, i));
	  break;

	case '0':
	case 'u':
	  /* Unused slot or insn reference: share the pointer.  */
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  /* Vector operand: allocate a new rtvec and copy each element.  */
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	case 'S':
	  /* Strings are shared, not duplicated.  */
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
4501
4502
4503/* Try to rewrite a memory address to make it valid */
4504
/* Try to rewrite the address of MEM_RTX in place so that it becomes a
   valid i386 address (base + index*scale + offset).  On success the
   address inside MEM_RTX is replaced; on failure the obstack storage
   used for intermediate rtxes is released and MEM_RTX is untouched.  */
void
rewrite_address (mem_rtx)
     rtx mem_rtx;
{
  rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
  int scale = 1;
  int offset_adjust = 0;
  int was_only_offset = 0;
  rtx mem_addr = XEXP (mem_rtx, 0);
  /* Obstack high-water mark: obfree (storage) discards anything
     allocated after this point on the bail-out paths.  */
  char *storage = oballoc (0);
  int in_struct = 0;
  int is_spill_rtx = 0;

  in_struct = MEM_IN_STRUCT_P (mem_rtx);
  is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);

  /* First try a simple reassociation:
     (plus A (plus reg B)) -> (plus (plus A reg) B).  */
  if (GET_CODE (mem_addr) == PLUS
      && GET_CODE (XEXP (mem_addr, 1)) == PLUS
      && GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
    {
      /* This part is utilized by the combiner. */
      ret_rtx
	= gen_rtx (PLUS, GET_MODE (mem_addr),
		   gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
			    XEXP (mem_addr, 0), XEXP (XEXP (mem_addr, 1), 0)),
		   XEXP (XEXP (mem_addr, 1), 1));

      if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
	{
	  XEXP (mem_rtx, 0) = ret_rtx;
	  RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
	  return;
	}

      obfree (storage);
    }

  /* This part is utilized by loop.c.
     If the address contains PLUS (reg,const) and this pattern is invalid
     in this case - try to rewrite the address to make it valid. */
  storage = oballoc (0);
  index_rtx = base_rtx = offset_rtx = NULL;

  /* Find the base index and offset elements of the memory address. */
  if (GET_CODE (mem_addr) == PLUS)
    {
      if (GET_CODE (XEXP (mem_addr, 0)) == REG)
	{
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    base_rtx = XEXP (mem_addr, 1), index_rtx = XEXP (mem_addr, 0);
	  else
	    base_rtx = XEXP (mem_addr, 0), offset_rtx = XEXP (mem_addr, 1);
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
	{
	  index_rtx = XEXP (mem_addr, 0);
	  if (GET_CODE (XEXP (mem_addr, 1)) == REG)
	    base_rtx = XEXP (mem_addr, 1);
	  else
	    offset_rtx = XEXP (mem_addr, 1);
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
	{
	  /* Recognize ((reg * const + const) + reg) + symbol and
	     fold the inner constant into offset_adjust.  */
	  if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT
	      && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0))
		  == REG)
	      && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1))
		  == CONST_INT)
	      && (GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1))
		  == CONST_INT)
	      && GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG
	      && GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
	    {
	      index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
	      offset_rtx = XEXP (mem_addr, 1);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	      offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
	    }
	  else
	    {
	      offset_rtx = XEXP (mem_addr, 1);
	      index_rtx = XEXP (XEXP (mem_addr, 0), 0);
	      base_rtx = XEXP (XEXP (mem_addr, 0), 1);
	    }
	}

      else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
	{
	  was_only_offset = 1;
	  index_rtx = NULL;
	  base_rtx = NULL;
	  offset_rtx = XEXP (mem_addr, 1);
	  offset_adjust = INTVAL (XEXP (mem_addr, 0));
	  if (offset_adjust == 0)
	    {
	      /* Adding zero: just drop the constant term.  */
	      XEXP (mem_rtx, 0) = offset_rtx;
	      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
	      return;
	    }
	}
      else
	{
	  obfree (storage);
	  return;
	}
    }
  else if (GET_CODE (mem_addr) == MULT)
    index_rtx = mem_addr;
  else
    {
      obfree (storage);
      return;
    }

  /* Split (mult reg const) into index and scale.  */
  if (index_rtx != 0 && GET_CODE (index_rtx) == MULT)
    {
      if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
	{
	  obfree (storage);
	  return;
	}

      scale_rtx = XEXP (index_rtx, 1);
      scale = INTVAL (scale_rtx);
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }

  /* Now find which of the elements are invalid and try to fix them. */
  if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
    {
      /* A constant "index": fold index*scale into the offset.  */
      offset_adjust = INTVAL (index_rtx) * scale;

      if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
	offset_rtx = plus_constant (offset_rtx, offset_adjust);
      else if (offset_rtx == 0)
	offset_rtx = const0_rtx;

      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      XEXP (mem_rtx, 0) = offset_rtx;
      return;
    }

  /* Fold a (plus reg const) base into reg base + offset_adjust.  */
  if (base_rtx && GET_CODE (base_rtx) == PLUS
      && GET_CODE (XEXP (base_rtx, 0)) == REG
      && GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (base_rtx, 1));
      base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
    }

  else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
    {
      offset_adjust += INTVAL (base_rtx);
      base_rtx = NULL;
    }

  /* Likewise fold a (plus reg const) index, scaling the constant.  */
  if (index_rtx && GET_CODE (index_rtx) == PLUS
      && GET_CODE (XEXP (index_rtx, 0)) == REG
      && GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
    {
      offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
      index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
    }

  /* Give up if the resulting index or base still isn't legitimate.  */
  if (index_rtx)
    {
      if (! LEGITIMATE_INDEX_P (index_rtx)
	  && ! (index_rtx == stack_pointer_rtx && scale == 1
		&& base_rtx == NULL))
	{
	  obfree (storage);
	  return;
	}
    }

  if (base_rtx)
    {
      if (! LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
	{
	  obfree (storage);
	  return;
	}
    }

  /* Rebuild the address from base/index/scale/offset, but only if some
     constant was actually folded; otherwise nothing was gained.  */
  if (offset_adjust != 0)
    {
      if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
	offset_rtx = plus_constant (offset_rtx, offset_adjust);
      else
	offset_rtx = const0_rtx;

      if (index_rtx)
	{
	  if (base_rtx)
	    {
	      if (scale != 1)
		{
		  /* (index * scale) + base [+ offset]  */
		  ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
				     gen_rtx (MULT, GET_MODE (index_rtx),
					      index_rtx, scale_rtx),
				     base_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	      else
		{
		  /* index + base [+ offset]  */
		  ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
				     index_rtx, base_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	    }
	  else
	    {
	      if (scale != 1)
		{
		  /* (index * scale) [+ offset]  */
		  ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx),
				     index_rtx, scale_rtx);

		  if (GET_CODE (offset_rtx) != CONST_INT
		      || INTVAL (offset_rtx) != 0)
		    ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
				       ret_rtx, offset_rtx);
		}
	      else
		{
		  /* index [+ offset]  */
		  if (GET_CODE (offset_rtx) == CONST_INT
		      && INTVAL (offset_rtx) == 0)
		    ret_rtx = index_rtx;
		  else
		    ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
				       index_rtx, offset_rtx);
		}
	    }
	}
      else
	{
	  if (base_rtx)
	    {
	      /* base [+ offset]  */
	      if (GET_CODE (offset_rtx) == CONST_INT
		  && INTVAL (offset_rtx) == 0)
		ret_rtx = base_rtx;
	      else
		ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx,
				   offset_rtx);
	    }
	  else if (was_only_offset)
	    ret_rtx = offset_rtx;
	  else
	    {
	      obfree (storage);
	      return;
	    }
	}

      XEXP (mem_rtx, 0) = ret_rtx;
      RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
      return;
    }
  else
    {
      obfree (storage);
      return;
    }
}
4779#endif /* NOTYET */
4780
4781/* Return 1 if the first insn to set cc before INSN also sets the register
4782   REG_RTX; otherwise return 0. */
4783int
4784last_to_set_cc (reg_rtx, insn)
4785     rtx reg_rtx, insn;
4786{
4787  rtx prev_insn = PREV_INSN (insn);
4788
4789  while (prev_insn)
4790    {
4791      if (GET_CODE (prev_insn) == NOTE)
4792	;
4793
4794      else if (GET_CODE (prev_insn) == INSN)
4795	{
4796	  if (GET_CODE (PATTERN (prev_insn)) != SET)
4797	    return (0);
4798
4799	  if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4800	    {
4801	      if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
4802		return (1);
4803
4804	      return (0);
4805	    }
4806
4807	  else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4808	    return (0);
4809	}
4810
4811      else
4812	return (0);
4813
4814      prev_insn = PREV_INSN (prev_insn);
4815    }
4816
4817  return (0);
4818}
4819
4820int
4821doesnt_set_condition_code (pat)
4822     rtx pat;
4823{
4824  switch (GET_CODE (pat))
4825    {
4826    case MEM:
4827    case REG:
4828      return 1;
4829
4830    default:
4831      return 0;
4832
4833    }
4834}
4835
4836int
4837sets_condition_code (pat)
4838     rtx pat;
4839{
4840  switch (GET_CODE (pat))
4841    {
4842    case PLUS:
4843    case MINUS:
4844    case AND:
4845    case IOR:
4846    case XOR:
4847    case NOT:
4848    case NEG:
4849    case MULT:
4850    case DIV:
4851    case MOD:
4852    case UDIV:
4853    case UMOD:
4854      return 1;
4855
4856    default:
4857      return (0);
4858    }
4859}
4860
4861int
4862str_immediate_operand (op, mode)
4863     register rtx op;
4864     enum machine_mode mode ATTRIBUTE_UNUSED;
4865{
4866  if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4867    return 1;
4868
4869  return 0;
4870}
4871
4872int
4873is_fp_insn (insn)
4874     rtx insn;
4875{
4876  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4877      && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4878	  || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4879	  || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4880    return 1;
4881
4882  return 0;
4883}
4884
4885/* Return 1 if the mode of the SET_DEST of insn is floating point
4886   and it is not an fld or a move from memory to memory.
4887   Otherwise return 0 */
4888
4889int
4890is_fp_dest (insn)
4891     rtx insn;
4892{
4893  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4894      && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4895	  || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4896	  || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4897      && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4898      && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4899      && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
4900    return 1;
4901
4902  return 0;
4903}
4904
4905/* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4906   memory and the source is a register.  */
4907
4908int
4909is_fp_store (insn)
4910     rtx insn;
4911{
4912  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4913      && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4914	  || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4915	  || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4916      && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4917      && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4918    return 1;
4919
4920  return 0;
4921}
4922
4923/* Return 1 if DEP_INSN sets a register which INSN uses as a base
4924   or index to reference memory.
4925   otherwise return 0 */
4926
4927int
4928agi_dependent (insn, dep_insn)
4929     rtx insn, dep_insn;
4930{
4931  if (GET_CODE (dep_insn) == INSN
4932      && GET_CODE (PATTERN (dep_insn)) == SET
4933      && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4934    return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn);
4935
4936  if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4937      && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4938      && push_operand (SET_DEST (PATTERN (dep_insn)),
4939                       GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4940    return reg_mentioned_in_mem (stack_pointer_rtx, insn);
4941
4942  return 0;
4943}
4944
4945/* Return 1 if reg is used in rtl as a base or index for a memory ref
4946   otherwise return 0. */
4947
4948int
4949reg_mentioned_in_mem (reg, rtl)
4950     rtx reg, rtl;
4951{
4952  register char *fmt;
4953  register int i, j;
4954  register enum rtx_code code;
4955
4956  if (rtl == NULL)
4957    return 0;
4958
4959  code = GET_CODE (rtl);
4960
4961  switch (code)
4962    {
4963    case HIGH:
4964    case CONST_INT:
4965    case CONST:
4966    case CONST_DOUBLE:
4967    case SYMBOL_REF:
4968    case LABEL_REF:
4969    case PC:
4970    case CC0:
4971    case SUBREG:
4972      return 0;
4973    default:
4974      break;
4975    }
4976
4977  if (code == MEM && reg_mentioned_p (reg, rtl))
4978    return 1;
4979
4980  fmt = GET_RTX_FORMAT (code);
4981  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4982    {
4983      if (fmt[i] == 'E')
4984	{
4985	  for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4986	    if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4987	      return 1;
4988	}
4989
4990      else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4991	return 1;
4992    }
4993
4994  return 0;
4995}
4996
4997/* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4998
4999   operands[0] = result, initialized with the startaddress
5000   operands[1] = alignment of the address.
5001   operands[2] = scratch register, initialized with the startaddress when
5002   		 not aligned, otherwise undefined
5003
5004   This is just the body. It needs the initialisations mentioned above and
5005   some address computing at the end.  These things are done in i386.md.  */
5006
5007char *
5008output_strlen_unroll (operands)
5009     rtx operands[];
5010{
5011  rtx xops[18];
5012
5013  xops[0] = operands[0];		/* Result */
5014  /*        operands[1];                 * Alignment */
5015  xops[1] = operands[2];		/* Scratch */
5016  xops[2] = GEN_INT (0);
5017  xops[3] = GEN_INT (2);
5018  xops[4] = GEN_INT (3);
5019  xops[5] = GEN_INT (4);
5020  /* xops[6] = gen_label_rtx ();	 * label when aligned to 3-byte */
5021  /* xops[7] = gen_label_rtx ();	 * label when aligned to 2-byte */
5022  xops[8] = gen_label_rtx ();		/* label of main loop */
5023
5024  if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
5025    xops[9] = gen_label_rtx ();		/* pentium optimisation */
5026
5027  xops[10] = gen_label_rtx ();		/* end label 2 */
5028  xops[11] = gen_label_rtx ();		/* end label 1 */
5029  xops[12] = gen_label_rtx ();		/* end label */
5030  /* xops[13]				 * Temporary used */
5031  xops[14] = GEN_INT (0xff);
5032  xops[15] = GEN_INT (0xff00);
5033  xops[16] = GEN_INT (0xff0000);
5034  xops[17] = GEN_INT (0xff000000);
5035
5036  /* Loop to check 1..3 bytes for null to get an aligned pointer.  */
5037
5038  /* Is there a known alignment and is it less than 4?  */
5039  if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
5040    {
5041      /* Is there a known alignment and is it not 2? */
5042      if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5043	{
5044	  xops[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
5045	  xops[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
5046
5047	  /* Leave just the 3 lower bits.
5048	     If this is a q-register, then the high part is used later
5049	     therefore use andl rather than andb. */
5050	  output_asm_insn (AS2 (and%L1,%4,%1), xops);
5051
5052	  /* Is aligned to 4-byte address when zero */
5053	  output_asm_insn (AS1 (je,%l8), xops);
5054
5055	  /* Side-effect even Parity when %eax == 3 */
5056	  output_asm_insn (AS1 (jp,%6), xops);
5057
5058	  /* Is it aligned to 2 bytes ? */
5059	  if (QI_REG_P (xops[1]))
5060	    output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5061	  else
5062	    output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5063
5064	  output_asm_insn (AS1 (je,%7), xops);
5065	}
5066      else
5067        {
5068	  /* Since the alignment is 2, we have to check 2 or 0 bytes;
5069	     check if is aligned to 4 - byte.  */
5070	  output_asm_insn (AS2 (and%L1,%3,%1), xops);
5071
5072	  /* Is aligned to 4-byte address when zero */
5073	  output_asm_insn (AS1 (je,%l8), xops);
5074        }
5075
5076      xops[13] = gen_rtx_MEM (QImode, xops[0]);
5077
5078      /* Now compare the bytes; compare with the high part of a q-reg
5079	 gives shorter code. */
5080      if (QI_REG_P (xops[1]))
5081        {
5082	  /* Compare the first n unaligned byte on a byte per byte basis. */
5083          output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5084
5085	  /* When zero we reached the end. */
5086          output_asm_insn (AS1 (je,%l12), xops);
5087
5088	  /* Increment the address. */
5089          output_asm_insn (AS1 (inc%L0,%0), xops);
5090
5091	  /* Not needed with an alignment of 2 */
5092	  if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5093	    {
5094	      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5095					 CODE_LABEL_NUMBER (xops[7]));
5096	      output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5097	      output_asm_insn (AS1 (je,%l12), xops);
5098	      output_asm_insn (AS1 (inc%L0,%0), xops);
5099
5100	      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5101					 CODE_LABEL_NUMBER (xops[6]));
5102	    }
5103
5104          output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5105        }
5106      else
5107        {
5108          output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5109          output_asm_insn (AS1 (je,%l12), xops);
5110          output_asm_insn (AS1 (inc%L0,%0), xops);
5111
5112	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5113				     CODE_LABEL_NUMBER (xops[7]));
5114          output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5115          output_asm_insn (AS1 (je,%l12), xops);
5116          output_asm_insn (AS1 (inc%L0,%0), xops);
5117
5118	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5119				     CODE_LABEL_NUMBER (xops[6]));
5120          output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5121        }
5122
5123      output_asm_insn (AS1 (je,%l12), xops);
5124      output_asm_insn (AS1 (inc%L0,%0), xops);
5125    }
5126
5127    /* Generate loop to check 4 bytes at a time.  It is not a good idea to
5128       align this loop.  It gives only huge programs, but does not help to
5129       speed up.  */
5130  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
5131
5132  xops[13] = gen_rtx_MEM (SImode, xops[0]);
5133  output_asm_insn (AS2 (mov%L1,%13,%1), xops);
5134
5135  if (QI_REG_P (xops[1]))
5136    {
5137      /* On i586 it is faster to combine the hi- and lo- part as
5138	 a kind of lookahead.  If anding both yields zero, then one
5139	 of both *could* be zero, otherwise none of both is zero;
5140	 this saves one instruction, on i486 this is slower
5141	 tested with P-90, i486DX2-66, AMD486DX2-66  */
5142      if (TARGET_PENTIUM)
5143        {
5144	  output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
5145	  output_asm_insn (AS1 (jne,%l9), xops);
5146        }
5147
5148      /* Check first byte. */
5149      output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
5150      output_asm_insn (AS1 (je,%l12), xops);
5151
5152      /* Check second byte. */
5153      output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
5154      output_asm_insn (AS1 (je,%l11), xops);
5155
5156      if (TARGET_PENTIUM)
5157	ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5158				   CODE_LABEL_NUMBER (xops[9]));
5159    }
5160
5161  else
5162    {
5163      /* Check first byte. */
5164      output_asm_insn (AS2 (test%L1,%14,%1), xops);
5165      output_asm_insn (AS1 (je,%l12), xops);
5166
5167      /* Check second byte. */
5168      output_asm_insn (AS2 (test%L1,%15,%1), xops);
5169      output_asm_insn (AS1 (je,%l11), xops);
5170    }
5171
5172  /* Check third byte. */
5173  output_asm_insn (AS2 (test%L1,%16,%1), xops);
5174  output_asm_insn (AS1 (je,%l10), xops);
5175
5176  /* Check fourth byte and increment address. */
5177  output_asm_insn (AS2 (add%L0,%5,%0), xops);
5178  output_asm_insn (AS2 (test%L1,%17,%1), xops);
5179  output_asm_insn (AS1 (jne,%l8), xops);
5180
5181  /* Now generate fixups when the compare stops within a 4-byte word. */
5182  output_asm_insn (AS2 (sub%L0,%4,%0), xops);
5183
5184  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
5185  output_asm_insn (AS1 (inc%L0,%0), xops);
5186
5187  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
5188  output_asm_insn (AS1 (inc%L0,%0), xops);
5189
5190  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
5191
5192  return "";
5193}
5194
5195char *
5196output_fp_conditional_move (which_alternative, operands)
5197     int which_alternative;
5198     rtx operands[];
5199{
5200  switch (which_alternative)
5201    {
5202    case 0:
5203      /* r <- cond ? arg : r */
5204      output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
5205      break;
5206
5207    case 1:
5208      /* r <- cond ? r : arg */
5209      output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
5210      break;
5211
5212    default:
5213      abort ();
5214    }
5215
5216  return "";
5217}
5218
char *
output_int_conditional_move (which_alternative, operands)
     int which_alternative;
     rtx operands[];
{
  int code = GET_CODE (operands[1]);
  enum machine_mode mode;
  rtx xops[4];

  /* This is very tricky. We have to do it right. For a code segment
     like:

	int foo, bar;
	....
	foo = foo - x;
	if (foo >= 0)
	  bar = y;

     final_scan_insn () may delete the insn which sets CC. We have to
     tell final_scan_insn () if it should be reinserted. When CODE is
     GT or LE, we have to check the CC_NO_OVERFLOW bit and return
     NULL_PTR to tell final to reinsert the test insn because the
     conditional move cannot be handled properly without it. */
  if ((code == GT || code == LE)
      && (cc_prev_status.flags & CC_NO_OVERFLOW))
    return NULL_PTR;

  /* A DImode cmov is emitted as two SImode cmovs; xops holds the
     high-word subregs and is only initialized (and used) when the
     destination mode is DImode.  */
  mode = GET_MODE (operands [0]);
  if (mode == DImode)
    {
      xops [0] = gen_rtx_SUBREG (SImode, operands [0], 1);
      xops [1] = operands [1];
      xops [2] = gen_rtx_SUBREG (SImode, operands [2], 1);
      xops [3] = gen_rtx_SUBREG (SImode, operands [3], 1);
    }

  switch (which_alternative)
    {
    case 0:
      /* r <- cond ? arg : r */
      output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
      break;

    case 1:
      /* r <- cond ? r : arg */
      output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
      if (mode == DImode)
	output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
      break;

    default:
      abort ();
    }

  return "";
}
5277