/* Convert RTL to assembler code and output it, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This is the final pass of the compiler.
   It looks at the rtl code for a function and outputs assembler code.

   Call `final_start_function' to output the assembler code for function entry,
   `final' to output assembler code for some RTL code,
   `final_end_function' to output assembler code for function exit.
   If a function is compiled in several pieces, each piece is
   output separately with `final'.

   Some optimizations are also done at this level.
   Move instructions that were made unnecessary by good register allocation
   are detected and omitted from the output.  (Though most of these
   are removed by the last jump pass.)

   Instructions to set the condition codes are omitted when it can be
   seen that the condition codes already had the desired values.

   In some cases it is sufficient if the inherited condition codes
   have related values, but this may require the following insn
   (the one that tests the condition codes) to be modified.

   The code for the function prologue and epilogue is generated
   directly in assembler by the target functions function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "conditions.h"
#include "flags.h"
#include "real.h"
#include "hard-reg-set.h"
#include "output.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "reload.h"
#include "intl.h"
#include "basic-block.h"
#include "target.h"
#include "debug.h"
#include "expr.h"
#include "cfglayout.h"
#include "tree-pass.h"
#include "timevar.h"
#include "cgraph.h"
#include "coverage.h"

#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h"		/* Needed for external data
				   declarations for e.g. AIX 4.x.  */
#endif

#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
#include "dwarf2out.h"
#endif

#ifdef DBX_DEBUGGING_INFO
#include "dbxout.h"
#endif

#ifdef SDB_DEBUGGING_INFO
#include "sdbout.h"
#endif

/* If we aren't using cc0, CC_STATUS_INIT shouldn't exist.  So define a
   null default for it to save conditionalization later.  */
#ifndef CC_STATUS_INIT
#define CC_STATUS_INIT
#endif

/* How to start an assembler comment.  */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif

/* Is the given character a logical line separator for the assembler?  */
#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
#endif

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

/* Bitflags used by final_scan_insn.  */
#define SEEN_BB		1
#define SEEN_NOTE	2
#define SEEN_EMITTED	4

/* Last insn processed by final_scan_insn.  */
static rtx debug_insn;
rtx current_output_insn;

/* Line number of last NOTE.  */
static int last_linenum;

/* Highest line number in current block.  */
static int high_block_linenum;

/* Likewise for function.  */
static int high_function_linenum;

/* Filename of last NOTE.  */
static const char *last_filename;

/* Whether to force emission of a line note before the next insn.  */
static bool force_source_line = false;

extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */

/* Nonzero while outputting an `asm' with operands.
   This means that inconsistencies are the user's fault, so don't die.
   The precise value is the insn being output, to pass to error_for_asm.  */
rtx this_is_asm_operands;

/* Number of operands of this insn, for an `asm' with operands.  */
static unsigned int insn_noperands;

/* Compare optimization flag.  */

static rtx last_ignored_compare = 0;

/* Assign a unique number to each insn that is output.
   This can be used to generate unique local labels.  */

static int insn_counter = 0;

#ifdef HAVE_cc0
/* This variable contains machine-dependent flags (defined in tm.h)
   set and examined by output routines
   that describe how to interpret the condition codes properly.  */

CC_STATUS cc_status;

/* During output of an insn, this contains a copy of cc_status
   from before the insn.  */

CC_STATUS cc_prev_status;
#endif

/* Indexed by hardware reg number, is 1 if that register is ever
   used in the current function.

   In life_analysis, or in stupid_life_analysis, this is set
   up to record the hard regs used explicitly.  Reload adds
   in the hard regs used for holding pseudo regs.  Final uses
   it to generate the code in the function prologue and epilogue
   to save and restore registers as needed.  */

char regs_ever_live[FIRST_PSEUDO_REGISTER];

/* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
   Unlike regs_ever_live, elements of this array corresponding to
   eliminable regs like the frame pointer are set if an asm sets them.  */

char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];

/* Nonzero means current function must be given a frame pointer.
   Initialized in function.c to 0.  Set only in reload1.c as per
   the needs of the function.  */

int frame_pointer_needed;

/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */

static int block_depth;

/* Nonzero if have enabled APP processing of our assembler output.  */

static int app_on;

/* If we are outputting an insn sequence, this contains the sequence rtx.
   Zero otherwise.  */

rtx final_sequence;

#ifdef ASSEMBLER_DIALECT

/* Number of the assembler dialect to use, starting at 0.  */
static int dialect_number;
#endif

#ifdef HAVE_conditional_execution
/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
rtx current_insn_predicate;
#endif

#ifdef HAVE_ATTR_length
static int asm_insn_count (rtx);
#endif
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
static bool notice_source_line (rtx);
static rtx walk_alter_subreg (rtx *);
static void output_asm_name (void);
static void output_alternate_entry_point (FILE *, rtx);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
static void output_operand (rtx, int);
#ifdef LEAF_REGISTERS
static void leaf_renumber_regs (rtx);
#endif
#ifdef HAVE_cc0
static int alter_cond (rtx);
#endif
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx);
#endif
#ifdef HAVE_ATTR_length
static int align_fuzz (rtx, rtx, int, unsigned);
#endif

/* Initialize data in final at the beginning of a compilation.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
  app_on = 0;
  final_sequence = 0;

#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}

/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */
void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
}

/* Default target hook that outputs nothing to a stream.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}

/* Enable APP processing of subsequent output.
   Used before the output from an `asm' statement.  */

void
app_enable (void)
{
  if (! app_on)
    {
      fputs (ASM_APP_ON, asm_out_file);
      app_on = 1;
    }
}

/* Disable APP processing of subsequent output.
   Called from varasm.c before most kinds of output.  */

void
app_disable (void)
{
  if (app_on)
    {
      fputs (ASM_APP_OFF, asm_out_file);
      app_on = 0;
    }
}

/* Return the number of slots filled in the current
   delayed branch sequence (we don't count the insn needing the
   delay slot).  Zero if not in a delayed branch sequence.  */

#ifdef DELAY_SLOTS
int
dbr_sequence_length (void)
{
  if (final_sequence != 0)
    return XVECLEN (final_sequence, 0) - 1;
  else
    return 0;
}
#endif

/* The next two pages contain routines used to compute the length of an insn
   and to shorten branches.  */

/* Arrays for insn lengths, and addresses.  The latter is referenced by
   `insn_current_length'.  */

static int *insn_lengths;

varray_type insn_addresses_;

/* Max uid for which the above arrays are valid.  */
static int insn_lengths_max_uid;

/* Address of insn being processed.  Used by `insn_current_length'.  */
int insn_current_address;

/* Address of insn being processed in previous iteration.  */
int insn_last_address;

/* Known invariant alignment of insn being processed.  */
int insn_current_align;

/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
   gives the next following alignment insn that increases the known
   alignment, or NULL_RTX if there is no such insn.
   For any alignment obtained this way, we can again index uid_align with
   its uid to obtain the next following align that in turn increases the
   alignment, till we reach NULL_RTX; the sequence obtained this way
   for each insn we'll call the alignment chain of this insn in the following
   comments.  */

/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
static rtx *uid_align;
static int *uid_shuid;
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \

/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
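
/* Illustrative sketch only (kept compiled out): walking the alignment chain
   described above.  The helper name is hypothetical; uid_align and INSN_UID
   are the real array and macro used below.  */
#if 0
static void
example_walk_alignment_chain (rtx insn)
{
  rtx align_label;

  /* Each step moves to the next following insn whose known alignment is
     larger than everything seen so far; align_fuzz iterates this way.  */
  for (align_label = uid_align[INSN_UID (insn)];
       align_label != NULL_RTX;
       align_label = uid_align[INSN_UID (align_label)])
    ;
}
#endif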

/* Indicate that branch shortening hasn't yet been done.  */

void
init_insn_lengths (void)
{
  if (uid_shuid)
    {
      free (uid_shuid);
      uid_shuid = 0;
    }
  if (insn_lengths)
    {
      free (insn_lengths);
      insn_lengths = 0;
      insn_lengths_max_uid = 0;
    }
#ifdef HAVE_ATTR_length
  INSN_ADDRESSES_FREE ();
#endif
  if (uid_align)
    {
      free (uid_align);
      uid_align = 0;
    }
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */
static inline int
get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
		   int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
{
#ifdef HAVE_ATTR_length
  rtx body;
  int i;
  int length = 0;

  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];
  else
    switch (GET_CODE (insn))
      {
      case NOTE:
      case BARRIER:
      case CODE_LABEL:
	return 0;

      case CALL_INSN:
	length = fallback_fn (insn);
	break;

      case JUMP_INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	  {
	    /* Alignment is machine-dependent and should be handled by
	       ADDR_VEC_ALIGN.  */
	  }
	else
	  length = fallback_fn (insn);
	break;

      case INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	  return 0;

	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  length = asm_insn_count (body) * fallback_fn (insn);
	else if (GET_CODE (body) == SEQUENCE)
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    length += get_attr_length (XVECEXP (body, 0, i));
	else
	  length = fallback_fn (insn);
	break;

      default:
	break;
      }

#ifdef ADJUST_INSN_LENGTH
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
#else /* not HAVE_ATTR_length */
  return 0;
#define insn_default_length 0
#define insn_min_length 0
#endif /* not HAVE_ATTR_length */
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length.  */
int
get_attr_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length.  */
int
get_attr_min_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}

/* Code to handle alignment inside shorten_branches.  */

46458816Simp/* Here is an explanation how the algorithm in align_fuzz can give
46537785Smsmith   proper results:
46637785Smsmith
46758816Simp   Call a sequence of instructions beginning with alignment point X
46858816Simp   and continuing until the next alignment point `block X'.  When `X'
46958816Simp   is used in an expression, it means the alignment value of the
470140926Simp   alignment point.
471140926Simp
47237785Smsmith   Call the distance between the start of the first insn of block X, and
47358816Simp   the end of the last insn of block X `IX', for the `inner size of X'.
47458816Simp   This is clearly the sum of the instruction lengths.
47537785Smsmith
47637785Smsmith   Likewise with the next alignment-delimited block following X, which we
47737785Smsmith   shall call block Y.
47837785Smsmith
47937785Smsmith   Call the distance between the start of the first insn of block X, and
48037785Smsmith   the start of the first insn of block Y `OX', for the `outer size of X'.
48172940Simp
48237785Smsmith   The estimated padding is then OX - IX.
483104252Sbrooks
48437785Smsmith   OX can be safely estimated as
48537785Smsmith
48637785Smsmith           if (X >= Y)
48737785Smsmith                   OX = round_up(IX, Y)
48837785Smsmith           else
48958816Simp                   OX = round_up(IX, X) + Y - X
49037785Smsmith
49137785Smsmith   Clearly est(IX) >= real(IX), because that only depends on the
49237785Smsmith   instruction lengths, and those being overestimated is a given.
49337785Smsmith
49437785Smsmith   Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
49558816Simp   we needn't worry about that when thinking about OX.
49637785Smsmith
49737785Smsmith   When X >= Y, the alignment provided by Y adds no uncertainty factor
49837785Smsmith   for branch ranges starting before X, so we can just round what we have.
49937785Smsmith   But when X < Y, we don't know anything about the, so to speak,
50037785Smsmith   `middle bits', so we have to assume the worst when aligning up from an
50137785Smsmith   address mod X to one mod Y, which is Y - X.  */
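
/* Illustrative sketch only (kept compiled out): the OX estimate above written
   as code.  The function and its parameters are hypothetical; X_ALIGN and
   Y_ALIGN are the byte alignments of the points starting blocks X and Y, and
   IX is the inner size of block X.  For example, IX = 10, X = 4, Y = 8 gives
   round_up (10, 4) + 8 - 4 = 16, i.e. up to 6 bytes of estimated padding.  */
#if 0
static int
example_estimate_outer_size (int ix, int x_align, int y_align)
{
  /* round_up for power-of-two alignments.  */
#define EXAMPLE_ROUND_UP(V, A) (((V) + (A) - 1) & -(A))
  if (x_align >= y_align)
    return EXAMPLE_ROUND_UP (ix, y_align);
  return EXAMPLE_ROUND_UP (ix, x_align) + y_align - x_align;
#undef EXAMPLE_ROUND_UP
}
#endif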

#ifndef LABEL_ALIGN
#define LABEL_ALIGN(LABEL) align_labels_log
#endif

#ifndef LABEL_ALIGN_MAX_SKIP
#define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
#endif

#ifndef LOOP_ALIGN
#define LOOP_ALIGN(LABEL) align_loops_log
#endif

#ifndef LOOP_ALIGN_MAX_SKIP
#define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER
#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
#endif

#ifndef JUMP_ALIGN
#define JUMP_ALIGN(LABEL) align_jumps_log
#endif

#ifndef JUMP_ALIGN_MAX_SKIP
#define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
#endif

#ifndef ADDR_VEC_ALIGN
static int
final_addr_vec_align (rtx addr_vec)
{
  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));

  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);

}

#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
#endif

#ifndef INSN_LENGTH_ALIGNMENT
#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
#endif

#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])

/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
/* For the benefit of port specific code do this also as a function.  */

int
label_to_alignment (rtx label)
{
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
  return LABEL_ALIGN_LOG (label);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
}

#ifdef HAVE_ATTR_length
/* The differences in addresses
   between a branch and its target might grow or shrink depending on
   the alignment the start insn of the range (the branch for a forward
   branch or the label for a backward branch) starts out on; if these
   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
   The function align_fuzz calculates the amount we have to add to the
   naively computed difference, by traversing the part of the alignment
   chain of the start insn of the range that is in front of the end insn
   of the range, and considering for each alignment the maximum amount
   that it might contribute to a size increase.

   For casesi tables, we also want to know worst case minimum amounts of
   address difference, in case a machine description wants to introduce
   some common offset that is added to all offsets in a table.
   For this purpose, align_fuzz with a growth argument of 0 computes the
   appropriate adjustment.  */

/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */

static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
      known_align_log = LABEL_ALIGN_LOG (align_label);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
      new_align = 1 << known_align_log;
      if (new_align < known_align)
	continue;
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}

/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */
637121752Sbrooks
638121752Sbrooksint
639121752Sbrooksinsn_current_reference_address (rtx branch)
640121752Sbrooks{
64137785Smsmith  rtx dest, seq;
642121752Sbrooks  int seq_uid;
643121752Sbrooks
644121752Sbrooks  if (! INSN_ADDRESSES_SET_P ())
645121752Sbrooks    return 0;
64637785Smsmith
647121752Sbrooks  seq = NEXT_INSN (PREV_INSN (branch));
648121752Sbrooks  seq_uid = INSN_UID (seq);
649121752Sbrooks  if (!JUMP_P (branch))
650121752Sbrooks    /* This can happen for example on the PA; the objective is to know the
651121752Sbrooks       offset to address something in front of the start of the function.
652121752Sbrooks       Thus, we can treat it like a backward branch.
653121752Sbrooks       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
654121752Sbrooks       any alignment we'd encounter, so we skip the call to align_fuzz.  */
655121752Sbrooks    return insn_current_address;
65637785Smsmith  dest = JUMP_LABEL (branch);
657121752Sbrooks
658121752Sbrooks  /* BRANCH has no proper alignment chain set, so use SEQ.
65937785Smsmith     BRANCH also has no INSN_SHUID.  */
660121752Sbrooks  if (INSN_SHUID (seq) < INSN_SHUID (dest))
661121752Sbrooks    {
66237785Smsmith      /* Forward branch.  */
663121752Sbrooks      return (insn_last_address + insn_lengths[seq_uid]
664121752Sbrooks	      - align_fuzz (seq, dest, length_unit_log, ~0));
66537785Smsmith    }
666121752Sbrooks  else
667121752Sbrooks    {
668121752Sbrooks      /* Backward branch.  */
669121752Sbrooks      return (insn_current_address
670121752Sbrooks	      + align_fuzz (dest, seq, length_unit_log, ~0));
671121752Sbrooks    }
672122024Simp}
673122024Simp#endif /* HAVE_ATTR_length */
674122024Simp
675122024Simp/* Compute branch alignments based on frequency information in the
676122024Simp   CFG.  */
677122024Simp
678122024Simpstatic unsigned int
679122024Simpcompute_alignments (void)
680122024Simp{
681122024Simp/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
682122024Simp/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
683122024Simp  basic_block bb;
68437785Smsmith
685121752Sbrooks/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
686121752Sbrooks/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
68737785Smsmith
688121752Sbrooks  /* If not optimizing or optimizing for size, don't assign any alignments.  */
689121752Sbrooks  if (! optimize || optimize_size)
69058816Simp    return 0;
69137785Smsmith
69237785Smsmith  FOR_EACH_BB (bb)
693140888Simp    {
694140888Simp      rtx label = BB_HEAD (bb);
695140888Simp      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
696140888Simp      edge e;
697140888Simp      edge_iterator ei;
698140888Simp/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
699140888Simp      int log, max_skip, max_log;
700140888Simp
701140888Simp/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
702140888Simp      if (!LABEL_P (label)
703140888Simp	  || probably_never_executed_bb_p (bb))
704140888Simp	continue;
705140888Simp/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
706140888Simp      /* If user has specified an alignment, honour it.  */
707140888Simp      if (LABEL_ALIGN_LOG (label) > 0)
708140888Simp	continue;
70937785Smsmith
71037785Smsmith/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
71137785Smsmith      max_log = LABEL_ALIGN (label);
71237785Smsmith      max_skip = LABEL_ALIGN_MAX_SKIP;
71337785Smsmith
71437785Smsmith      FOR_EACH_EDGE (e, ei, bb->preds)
71537785Smsmith	{
71637785Smsmith	  if (e->flags & EDGE_FALLTHRU)
71741591Sarchie	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
71837785Smsmith	  else
71937785Smsmith	    branch_frequency += EDGE_FREQUENCY (e);
72037785Smsmith	}
72137785Smsmith
      /* There are two purposes for aligning a block with no fallthru incoming edge:
	 1) to avoid fetch stalls when the branch destination is near a cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than their predecessor when that predecessor is unlikely to be executed
	 at all when the function is called.  */

      if (!has_fallthru
	  && (branch_frequency > BB_FREQ_MAX / 10
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = JUMP_ALIGN_MAX_SKIP;
	    }
	}
      /* In case the block is frequent and reached mostly by non-fallthru edges,
	 align it.  It is most likely the first block of a loop.  */
      if (has_fallthru
	  && maybe_hot_bb_p (bb)
	  && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
	  && branch_frequency > fallthru_frequency * 2)
	{
	  log = LOOP_ALIGN (label);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LOOP_ALIGN_MAX_SKIP;
	    }
	}
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
      SET_LABEL_ALIGN (label, max_log, max_skip);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
    }
  return 0;
}

struct tree_opt_pass pass_compute_alignments =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  compute_alignments,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};


/* Make a pass over all insns and compute their actual lengths by shortening
   any branches of variable length if possible.  */

/* shorten_branches might be called multiple times:  for example, the SH
   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
   In order to do this, it needs proper length information, which it obtains
   by calling shorten_branches.  This cannot be collapsed with
   shorten_branches itself into a single pass unless we also want to integrate
   reorg.c, since the branch splitting exposes new instructions with delay
   slots.  */
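
/* Illustrative sketch only (kept compiled out): shorten_branches below is at
   heart a fixed-point iteration over insn lengths.  This hypothetical helper
   shows just that skeleton; the real code additionally handles alignment,
   ADDR_DIFF_VECs and delay-slot sequences.  */
#if 0
static void
example_relax_insn_lengths (rtx first)
{
  int something_changed = 1;

  while (something_changed)
    {
      rtx insn;
      int address = 0;

      something_changed = 0;
      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
	{
	  int uid = INSN_UID (insn);
	  int new_length = insn_current_length (insn);

	  INSN_ADDRESSES (uid) = address;
	  if (new_length != insn_lengths[uid])
	    {
	      insn_lengths[uid] = new_length;
	      something_changed = 1;
	    }
	  address += new_length;
	}
    }
}
#endif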

void
shorten_branches (rtx first ATTRIBUTE_UNUSED)
{
  rtx insn;
  int max_uid;
  int i;
  int max_log;
  int max_skip;
#ifdef HAVE_ATTR_length
#define MAX_CODE_ALIGN 16
  rtx seq;
  int something_changed = 1;
  char *varying_length;
  rtx body;
  int uid;
  rtx align_tab[MAX_CODE_ALIGN];

#endif

/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
  /* Compute maximum UID and allocate uid_shuid.  */
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
  max_uid = get_max_uid ();

  /* Free uid_shuid before reallocating it.  */
  free (uid_shuid);

  uid_shuid = XNEWVEC (int, max_uid);

  /* APPLE LOCAL for-fsf-4_4 3274130 5295549 */ \
  /* Set up uid_shuid to be strictly
     monotonically rising with insn order.  */
  /* We use max_log here to keep track of the maximum alignment we want to
     impose on the next CODE_LABEL (or the current one if we are processing
     the CODE_LABEL itself).  */

  max_log = 0;
  max_skip = 0;

  for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
    {
      int log;

      INSN_SHUID (insn) = i++;
      if (INSN_P (insn))
	continue;

      if (LABEL_P (insn))
	{
	  rtx next;

	  /* Merge in alignments computed by compute_alignments.  */
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
	  log = LABEL_ALIGN_LOG (insn);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
	  if (max_log < log)
	    {
	      max_log = log;
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
	      max_skip = LABEL_MAX_SKIP (insn);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
	    }

	  log = LABEL_ALIGN (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_ALIGN_MAX_SKIP;
	    }
	  next = next_nonnote_insn (insn);
	  /* ADDR_VECs only take room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    if (next && JUMP_P (next))
	      {
		rtx nextbody = PATTERN (next);
		if (GET_CODE (nextbody) == ADDR_VEC
		    || GET_CODE (nextbody) == ADDR_DIFF_VEC)
		  {
		    log = ADDR_VEC_ALIGN (next);
		    if (max_log < log)
		      {
			max_log = log;
			max_skip = LABEL_ALIGN_MAX_SKIP;
		      }
		  }
	      }
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
	  SET_LABEL_ALIGN (insn, max_log, max_skip);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
	  max_log = 0;
	  max_skip = 0;
	}
      else if (BARRIER_P (insn))
	{
	  rtx label;

	  for (label = insn; label && ! INSN_P (label);
	       label = NEXT_INSN (label))
	    if (LABEL_P (label))
	      {
		log = LABEL_ALIGN_AFTER_BARRIER (insn);
		if (max_log < log)
		  {
		    max_log = log;
		    max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
		  }
		break;
	      }
	}
    }
#ifdef HAVE_ATTR_length

  /* Allocate the rest of the arrays.  */
  insn_lengths = XNEWVEC (int, max_uid);
  insn_lengths_max_uid = max_uid;
  /* Syntax errors can lead to labels being outside of the main insn stream.
     Initialize insn_addresses, so that we get reproducible results.  */
  INSN_ADDRESSES_ALLOC (max_uid);

  varying_length = XCNEWVEC (char, max_uid);

  /* Initialize uid_align.  We scan instructions
     from end to start, and keep in align_tab[n] the last seen insn
     that does an alignment of at least n+1, i.e. the successor
     in the alignment chain for an insn that does / has a known
     alignment of n.  */
  uid_align = XCNEWVEC (rtx, max_uid);

  for (i = MAX_CODE_ALIGN; --i >= 0;)
    align_tab[i] = NULL_RTX;
  seq = get_last_insn ();
  for (; seq; seq = PREV_INSN (seq))
    {
      int uid = INSN_UID (seq);
      int log;
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
      log = (LABEL_P (seq) ? LABEL_ALIGN_LOG (seq) : 0);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
      uid_align[uid] = align_tab[0];
      if (log)
	{
	  /* Found an alignment label.  */
	  uid_align[uid] = align_tab[log];
	  for (i = log - 1; i >= 0; i--)
	    align_tab[i] = seq;
	}
    }
#ifdef CASE_VECTOR_SHORTEN_MODE
  if (optimize)
    {
      /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
         label fields.  */

      int min_shuid = INSN_SHUID (get_insns ()) - 1;
      int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
      int rel;

      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
	{
	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
	  int len, i, min, max, insn_shuid;
	  int min_align;
	  addr_diff_vec_flags flags;

	  if (!JUMP_P (insn)
	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	    continue;
	  pat = PATTERN (insn);
	  len = XVECLEN (pat, 1);
	  gcc_assert (len > 0);
	  min_align = MAX_CODE_ALIGN;
	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
	    {
	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
	      int shuid = INSN_SHUID (lab);
	      if (shuid < min)
		{
		  min = shuid;
		  min_lab = lab;
		}
	      if (shuid > max)
		{
		  max = shuid;
		  max_lab = lab;
		}
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
	      if (min_align > (int) LABEL_ALIGN_LOG (lab))
		min_align = LABEL_ALIGN_LOG (lab);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
	    }
	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
	  insn_shuid = INSN_SHUID (insn);
	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
	  memset (&flags, 0, sizeof (flags));
	  flags.min_align = min_align;
	  flags.base_after_vec = rel > insn_shuid;
	  flags.min_after_vec  = min > insn_shuid;
	  flags.max_after_vec  = max > insn_shuid;
	  flags.min_after_base = min > rel;
	  flags.max_after_base = max > rel;
	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
	}
    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

  /* Compute initial lengths, addresses, and varying flags for each insn.  */
  for (insn_current_address = 0, insn = first;
       insn != 0;
       insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
    {
      uid = INSN_UID (insn);

      insn_lengths[uid] = 0;

      if (LABEL_P (insn))
	{
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
	  int log = LABEL_ALIGN_LOG (insn);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
	  if (log)
	    {
	      int align = 1 << log;
	      int new_address = (insn_current_address + align - 1) & -align;
	      insn_lengths[uid] = new_address - insn_current_address;
	    }
	}

      INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];

      if (NOTE_P (insn) || BARRIER_P (insn)
	  || LABEL_P (insn))
	continue;
      if (INSN_DELETED_P (insn))
	continue;

      body = PATTERN (insn);
      if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	{
	  /* This only takes room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    insn_lengths[uid] = (XVECLEN (body,
					  GET_CODE (body) == ADDR_DIFF_VEC)
				 * GET_MODE_SIZE (GET_MODE (body)));
	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
	}
      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
      else if (GET_CODE (body) == SEQUENCE)
	{
	  int i;
	  int const_delay_slots;
#ifdef DELAY_SLOTS
	  const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
#else
	  const_delay_slots = 0;
#endif
	  /* Inside a delay slot sequence, we do not do any branch shortening
	     if the shortening could change the number of delay slots
	     of the branch.  */
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    {
	      rtx inner_insn = XVECEXP (body, 0, i);
	      int inner_uid = INSN_UID (inner_insn);
	      int inner_length;

	      if (GET_CODE (body) == ASM_INPUT
		  || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
		inner_length = (asm_insn_count (PATTERN (inner_insn))
				* insn_default_length (inner_insn));
	      else
		inner_length = insn_default_length (inner_insn);

	      insn_lengths[inner_uid] = inner_length;
	      if (const_delay_slots)
		{
		  if ((varying_length[inner_uid]
		       = insn_variable_length_p (inner_insn)) != 0)
		    varying_length[uid] = 1;
		  INSN_ADDRESSES (inner_uid) = (insn_current_address
						+ insn_lengths[uid]);
		}
	      else
		varying_length[inner_uid] = 0;
	      insn_lengths[uid] += inner_length;
	    }
	}
      else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
	{
	  insn_lengths[uid] = insn_default_length (insn);
	  varying_length[uid] = insn_variable_length_p (insn);
	}

      /* If needed, do any adjustment.  */
#ifdef ADJUST_INSN_LENGTH
      ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
      if (insn_lengths[uid] < 0)
	fatal_insn ("negative insn length", insn);
#endif
    }

  /* Now loop over all the insns finding varying length insns.  For each,
     get the current insn length.  If it has changed, reflect the change.
     When nothing changes for a full pass, we are done.  */

  while (something_changed)
    {
      something_changed = 0;
      insn_current_align = MAX_CODE_ALIGN - 1;
      for (insn_current_address = 0, insn = first;
	   insn != 0;
	   insn = NEXT_INSN (insn))
	{
	  int new_length;
#ifdef ADJUST_INSN_LENGTH
	  int tmp_length;
#endif
	  int length_align;

	  uid = INSN_UID (insn);

	  if (LABEL_P (insn))
	    {
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
	      int log = LABEL_ALIGN_LOG (insn);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
	      if (log > insn_current_align)
		{
		  int align = 1 << log;
		  int new_address = (insn_current_address + align - 1) & -align;
		  insn_lengths[uid] = new_address - insn_current_address;
		  insn_current_align = log;
		  insn_current_address = new_address;
		}
	      else
		insn_lengths[uid] = 0;
	      INSN_ADDRESSES (uid) = insn_current_address;
	      continue;
	    }

	  length_align = INSN_LENGTH_ALIGNMENT (insn);
	  if (length_align < insn_current_align)
	    insn_current_align = length_align;

	  insn_last_address = INSN_ADDRESSES (uid);
	  INSN_ADDRESSES (uid) = insn_current_address;

#ifdef CASE_VECTOR_SHORTEN_MODE
	  if (optimize && JUMP_P (insn)
	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      rtx body = PATTERN (insn);
	      int old_length = insn_lengths[uid];
	      rtx rel_lab = XEXP (XEXP (body, 0), 0);
	      rtx min_lab = XEXP (XEXP (body, 2), 0);
	      rtx max_lab = XEXP (XEXP (body, 3), 0);
	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
	      rtx prev;
	      int rel_align = 0;
	      addr_diff_vec_flags flags;

	      /* Avoid automatic aggregate initialization.  */
	      flags = ADDR_DIFF_VEC_FLAGS (body);

	      /* Try to find a known alignment for rel_lab.  */
	      for (prev = rel_lab;
		   prev
		   && ! insn_lengths[INSN_UID (prev)]
		   && ! (varying_length[INSN_UID (prev)] & 1);
		   prev = PREV_INSN (prev))
		if (varying_length[INSN_UID (prev)] & 2)
		  {
/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
		    rel_align = LABEL_ALIGN_LOG (prev);
/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
		    break;
		  }

	      /* See the comment on addr_diff_vec_flags in rtl.h for the
		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
	      /* Anything after INSN still has addresses from the last
		 pass; adjust these so that they reflect our current
		 estimate for this pass.  */
	      if (flags.base_after_vec)
		rel_addr += insn_current_address - insn_last_address;
	      if (flags.min_after_vec)
		min_addr += insn_current_address - insn_last_address;
	      if (flags.max_after_vec)
		max_addr += insn_current_address - insn_last_address;
	      /* We want to know the worst case, i.e. lowest possible value
		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
		 its offset is positive, and we have to be wary of code shrink;
		 otherwise, it is negative, and we have to be wary of code
		 size increase.  */
	      if (flags.min_after_base)
		{
		  /* If INSN is between REL_LAB and MIN_LAB, the size
		     changes we are about to make can change the alignment
		     within the observed offset, therefore we have to break
		     it up into two parts that are independent.  */
		  if (! flags.base_after_vec && flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
		    }
		  else
		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
		    }
		  else
		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
		}
	      /* Likewise, determine the highest possible value
		 for the offset of MAX_LAB.  */
122138305Smsmith	      if (flags.max_after_base)
122238305Smsmith		{
122337785Smsmith		  if (! flags.base_after_vec && flags.max_after_vec)
122437785Smsmith		    {
122537785Smsmith		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
122637785Smsmith		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
122737785Smsmith		    }
122837785Smsmith		  else
122937785Smsmith		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
123037785Smsmith		}
123137785Smsmith	      else
123272940Simp		{
123372940Simp		  if (flags.base_after_vec && ! flags.max_after_vec)
123437785Smsmith		    {
123537785Smsmith		      max_addr += align_fuzz (max_lab, insn, 0, 0);
1236104252Sbrooks		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
123737785Smsmith		    }
123837785Smsmith		  else
123937785Smsmith		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
124037785Smsmith		}
124137785Smsmith	      PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
124238592Smsmith							max_addr - rel_addr,
124338592Smsmith							body));
124443314Sdillon	      if (JUMP_TABLES_IN_TEXT_SECTION
124538592Smsmith		  || readonly_data_section == text_section)
124637785Smsmith		{
124737785Smsmith		  insn_lengths[uid]
124843314Sdillon		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
124937785Smsmith		  insn_current_address += insn_lengths[uid];
125037785Smsmith		  if (insn_lengths[uid] != old_length)
125137785Smsmith		    something_changed = 1;
125237785Smsmith		}
125337785Smsmith
125437785Smsmith	      continue;
125537785Smsmith	    }
125637785Smsmith#endif /* CASE_VECTOR_SHORTEN_MODE */
125737785Smsmith
125837785Smsmith	  if (! (varying_length[uid]))
125937785Smsmith	    {
126037785Smsmith	      if (NONJUMP_INSN_P (insn)
126137785Smsmith		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
126237785Smsmith		{
126337785Smsmith		  int i;
126437785Smsmith
126537785Smsmith		  body = PATTERN (insn);
126637785Smsmith		  for (i = 0; i < XVECLEN (body, 0); i++)
126737785Smsmith		    {
126872940Simp		      rtx inner_insn = XVECEXP (body, 0, i);
126972940Simp		      int inner_uid = INSN_UID (inner_insn);
127037785Smsmith
127137785Smsmith		      INSN_ADDRESSES (inner_uid) = insn_current_address;
127237785Smsmith
1273		      insn_current_address += insn_lengths[inner_uid];
1274		    }
1275		}
1276	      else
1277		insn_current_address += insn_lengths[uid];
1278
1279	      continue;
1280	    }
1281
1282	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1283	    {
1284	      int i;
1285
1286	      body = PATTERN (insn);
1287	      new_length = 0;
1288	      for (i = 0; i < XVECLEN (body, 0); i++)
1289		{
1290		  rtx inner_insn = XVECEXP (body, 0, i);
1291		  int inner_uid = INSN_UID (inner_insn);
1292		  int inner_length;
1293
1294		  INSN_ADDRESSES (inner_uid) = insn_current_address;
1295
1296		  /* insn_current_length returns 0 for insns with a
1297		     non-varying length.  */
1298		  if (! varying_length[inner_uid])
1299		    inner_length = insn_lengths[inner_uid];
1300		  else
1301		    inner_length = insn_current_length (inner_insn);
1302
1303		  if (inner_length != insn_lengths[inner_uid])
1304		    {
1305		      insn_lengths[inner_uid] = inner_length;
1306		      something_changed = 1;
1307		    }
1308		  insn_current_address += insn_lengths[inner_uid];
1309		  new_length += inner_length;
1310		}
1311	    }
1312	  else
1313	    {
1314	      new_length = insn_current_length (insn);
1315	      insn_current_address += new_length;
1316	    }
1317
1318#ifdef ADJUST_INSN_LENGTH
1319	  /* If needed, do any adjustment.  */
1320	  tmp_length = new_length;
1321	  ADJUST_INSN_LENGTH (insn, new_length);
1322	  insn_current_address += (new_length - tmp_length);
1323#endif
1324
1325	  if (new_length != insn_lengths[uid])
1326	    {
1327	      insn_lengths[uid] = new_length;
1328	      something_changed = 1;
1329	    }
1330	}
1331      /* For a non-optimizing compile, do only a single pass.  */
1332      if (!optimize)
1333	break;
1334    }
1335
1336  free (varying_length);
1337
1338#endif /* HAVE_ATTR_length */
1339}
1340
1341#ifdef HAVE_ATTR_length
1342/* Given the body of an INSN known to be generated by an ASM statement, return
1343   the number of machine instructions likely to be generated for this insn.
1344   This is used to compute its length.  */
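/* For instance, an asm template such as "mov r0, r1\n\tadd r0, r0, #1"
   (operands made up for illustration) is counted as two instructions: the
   count starts at one and is bumped for each '\n' and for each character
   that the target treats as a logical line separator, typically ';'.  */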
1345
1346static int
1347asm_insn_count (rtx body)
1348{
1349  const char *template;
1350  int count = 1;
1351
1352  if (GET_CODE (body) == ASM_INPUT)
1353    template = XSTR (body, 0);
1354  else
1355    template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1356
1357  for (; *template; template++)
1358    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1359      count++;
1360
1361  return count;
1362}
1363#endif
1364
1365/* Output assembler code for the start of a function,
1366   and initialize some of the variables in this file
1367   for the new function.  The label for the function and associated
1368   assembler pseudo-ops have already been output in `assemble_start_function'.
1369
1370   FIRST is the first insn of the rtl for the function being compiled.
1371   FILE is the file to write assembler code to.
1372   OPTIMIZE is nonzero if we should eliminate redundant
1373     test and compare insns.  */
1374
1375void
1376final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1377		      int optimize ATTRIBUTE_UNUSED)
1378{
1379  block_depth = 0;
1380
1381  this_is_asm_operands = 0;
1382
1383  last_filename = locator_file (prologue_locator);
1384  last_linenum = locator_line (prologue_locator);
1385
1386  high_block_linenum = high_function_linenum = last_linenum;
1387
1388  (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1389
1390#if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1391  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1392    dwarf2out_begin_prologue (0, NULL);
1393#endif
1394
1395#ifdef LEAF_REG_REMAP
1396  if (current_function_uses_only_leaf_regs)
1397    leaf_renumber_regs (first);
1398#endif
1399
1400  /* The Sun386i and perhaps other machines don't work right
1401     if the profiling code comes after the prologue.  */
1402#ifdef PROFILE_BEFORE_PROLOGUE
1403  if (current_function_profile)
1404    profile_function (file);
1405#endif /* PROFILE_BEFORE_PROLOGUE */
1406
1407#if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1408  if (dwarf2out_do_frame ())
1409    dwarf2out_frame_debug (NULL_RTX, false);
1410#endif
1411
1412  /* If debugging, assign block numbers to all of the blocks in this
1413     function.  */
1414  if (write_symbols)
1415    {
1416      reemit_insn_block_notes ();
1417      number_blocks (current_function_decl);
1418      /* We never actually put out begin/end notes for the top-level
1419	 block in the function.  But, conceptually, that block is
1420	 always needed.  */
1421      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1422    }
1423
1424	  if (warn_frame_larger_than
1425	      && get_frame_size () > frame_larger_than_size)
1426	    {
1427	      /* Issue a warning.  */
1428	      warning (OPT_Wframe_larger_than_,
1429		       "the frame size of %wd bytes is larger than %wd bytes",
1430		       get_frame_size (), frame_larger_than_size);
1431	    }
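
  /* This diagnostic is controlled by the -Wframe-larger-than= option;
     e.g. compiling with -Wframe-larger-than=1024 warns about any function
     whose frame exceeds 1024 bytes.  The 1024 here is only an example
     threshold.  */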
1432
1433  /* First output the function prologue: code to set up the stack frame.  */
1434  targetm.asm_out.function_prologue (file, get_frame_size ());
1435
1436  /* If the machine represents the prologue as RTL, the profiling code must
1437     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
1438#ifdef HAVE_prologue
1439  if (! HAVE_prologue)
1440#endif
1441    profile_after_prologue (file);
1442}
1443
1444static void
1445profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1446{
1447#ifndef PROFILE_BEFORE_PROLOGUE
1448  if (current_function_profile)
1449    profile_function (file);
1450#endif /* not PROFILE_BEFORE_PROLOGUE */
1451}
1452
1453static void
1454profile_function (FILE *file ATTRIBUTE_UNUSED)
1455{
1456#ifndef NO_PROFILE_COUNTERS
1457# define NO_PROFILE_COUNTERS	0
1458#endif
1459#if defined(ASM_OUTPUT_REG_PUSH)
1460  int sval = current_function_returns_struct;
1461  rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1462#if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1463  int cxt = cfun->static_chain_decl != NULL;
1464#endif
1465#endif /* ASM_OUTPUT_REG_PUSH */
1466
1467  if (! NO_PROFILE_COUNTERS)
1468    {
1469      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1470      switch_to_section (data_section);
1471      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1472      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1473      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1474    }
1475
1476  switch_to_section (current_function_section ());
1477
1478#if defined(ASM_OUTPUT_REG_PUSH)
1479  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1480    ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1481#endif
1482
1483#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1484  if (cxt)
1485    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1486#else
1487#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1488  if (cxt)
1489    {
1490      ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1491    }
1492#endif
1493#endif
1494
1495  FUNCTION_PROFILER (file, current_function_funcdef_no);
1496
1497#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1498  if (cxt)
1499    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1500#else
1501#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1502  if (cxt)
1503    {
1504      ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1505    }
1506#endif
1507#endif
1508
1509#if defined(ASM_OUTPUT_REG_PUSH)
1510  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1511    ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1512#endif
1513}
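
/* What FUNCTION_PROFILER emits is entirely target-defined; on many targets
   it is a call into the profiling runtime (commonly an mcount variant),
   which may use the per-function "LP" counter word emitted above when
   profile counters are enabled.  */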
1514
1515/* Output assembler code for the end of a function.
1516   For clarity, args are same as those of `final_start_function'
1517   even though not all of them are needed.  */
1518
1519void
1520final_end_function (void)
1521{
1522  app_disable ();
1523
1524  (*debug_hooks->end_function) (high_function_linenum);
1525
1526  /* Finally, output the function epilogue:
1527     code to restore the stack frame and return to the caller.  */
1528  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1529
1530  /* And debug output.  */
1531  (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1532
1533#if defined (DWARF2_UNWIND_INFO)
1534  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1535      && dwarf2out_do_frame ())
1536    dwarf2out_end_epilogue (last_linenum, last_filename);
1537#endif
1538}
1539
1540/* Output assembler code for some insns: all or part of a function.
1541   For description of args, see `final_start_function', above.  */
1542
1543void
1544final (rtx first, FILE *file, int optimize)
1545{
1546  rtx insn;
1547  int max_uid = 0;
1548  int seen = 0;
1549
1550  last_ignored_compare = 0;
1551
1552#ifdef SDB_DEBUGGING_INFO
1553  /* When producing SDB debugging info, delete troublesome line number
1554     notes from inlined functions in other files as well as duplicate
1555     line number notes.  */
1556  if (write_symbols == SDB_DEBUG)
1557    {
1558      rtx last = 0;
1559      for (insn = first; insn; insn = NEXT_INSN (insn))
1560	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
1561	  {
1562	    if (last != 0
1563#ifdef USE_MAPPED_LOCATION
1564		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last)
1565#else
1566		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1567		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)
1568#endif
1569	      )
1570	      {
1571		delete_insn (insn);	/* Use delete_note.  */
1572		continue;
1573	      }
1574	    last = insn;
1575	  }
1576    }
1577#endif
1578
1579  for (insn = first; insn; insn = NEXT_INSN (insn))
1580    {
1581      if (INSN_UID (insn) > max_uid)       /* Find largest UID.  */
1582	max_uid = INSN_UID (insn);
1583#ifdef HAVE_cc0
1584      /* If CC tracking across branches is enabled, record, for each label
1585	 that is reached from only one place, the insn that jumps to it.  */
1586      if (optimize && JUMP_P (insn))
1587	{
1588	  rtx lab = JUMP_LABEL (insn);
1589	  if (lab && LABEL_NUSES (lab) == 1)
1590	    {
1591	      LABEL_REFS (lab) = insn;
1592	    }
1593	}
1594#endif
1595    }
1596
1597  init_recog ();
1598
1599  CC_STATUS_INIT;
1600
1601  /* Output the insns.  */
1602  for (insn = NEXT_INSN (first); insn;)
1603    {
1604#ifdef HAVE_ATTR_length
1605      if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1606	{
1607	  /* This can be triggered by bugs elsewhere in the compiler if
1608	     new insns are created after init_insn_lengths is called.  */
1609	  gcc_assert (NOTE_P (insn));
1610	  insn_current_address = -1;
1611	}
1612      else
1613	insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1614#endif /* HAVE_ATTR_length */
1615
1616      insn = final_scan_insn (insn, file, optimize, 0, &seen);
1617    }
1618}
1619
1620const char *
1621get_insn_template (int code, rtx insn)
1622{
1623  switch (insn_data[code].output_format)
1624    {
1625    case INSN_OUTPUT_FORMAT_SINGLE:
1626      return insn_data[code].output.single;
1627    case INSN_OUTPUT_FORMAT_MULTI:
1628      return insn_data[code].output.multi[which_alternative];
1629    case INSN_OUTPUT_FORMAT_FUNCTION:
1630      gcc_assert (insn);
1631      return (*insn_data[code].output.function) (recog_data.operand, insn);
1632
1633    default:
1634      gcc_unreachable ();
1635    }
1636}
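
/* Roughly speaking, a define_insn whose output template is a single plain
   string is recorded as INSN_OUTPUT_FORMAT_SINGLE, one written with
   "@"-style per-alternative strings as INSN_OUTPUT_FORMAT_MULTI, and one
   written as C code as INSN_OUTPUT_FORMAT_FUNCTION; the classification is
   made when the insn-output tables are generated.  */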
1637
1638/* Emit the appropriate declaration for an alternate-entry-point
1639   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
1640   LABEL_KIND != LABEL_NORMAL.
1641
1642   The case fall-through in this function is intentional.  */
1643static void
1644output_alternate_entry_point (FILE *file, rtx insn)
1645{
1646  const char *name = LABEL_NAME (insn);
1647
1648  switch (LABEL_KIND (insn))
1649    {
1650    case LABEL_WEAK_ENTRY:
1651#ifdef ASM_WEAKEN_LABEL
1652      ASM_WEAKEN_LABEL (file, name);
1653#endif
1654    case LABEL_GLOBAL_ENTRY:
1655      targetm.asm_out.globalize_label (file, name);
1656    case LABEL_STATIC_ENTRY:
1657#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1658      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1659#endif
1660      ASM_OUTPUT_LABEL (file, name);
1661      break;
1662
1663    case LABEL_NORMAL:
1664    default:
1665      gcc_unreachable ();
1666    }
1667}
1668
1669/* The final scan for one insn, INSN.
1670   Args are same as in `final', except that INSN
1671   is the insn being scanned.
1672   Value returned is the next insn to be scanned.
1673
1674   NOPEEPHOLES is the flag to disallow peephole processing (currently
1675   used within delayed branch sequence output).
1676
1677   SEEN is used to track the end of the prologue, for emitting
1678   debug information.  We force the emission of a line note after
1679   both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1680   at the beginning of the second basic block, whichever comes
1681   first.  */
1682
1683rtx
1684final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1685		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1686{
1687#ifdef HAVE_cc0
1688  rtx set;
1689#endif
1690  rtx next;
1691
1692  insn_counter++;
1693
1694  /* Ignore deleted insns.  These can occur when we split insns (due to a
1695     template of "#") while not optimizing.  */
1696  if (INSN_DELETED_P (insn))
1697    return NEXT_INSN (insn);
1698
1699  switch (GET_CODE (insn))
1700    {
1701    case NOTE:
1702      switch (NOTE_LINE_NUMBER (insn))
1703	{
1704	case NOTE_INSN_DELETED:
1705	case NOTE_INSN_FUNCTION_END:
1706	case NOTE_INSN_REPEATED_LINE_NUMBER:
1707	case NOTE_INSN_EXPECTED_VALUE:
1708	  break;
1709
1710	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1711	  in_cold_section_p = !in_cold_section_p;
1712	  (*debug_hooks->switch_text_section) ();
1713	  switch_to_section (current_function_section ());
1714	  break;
1715
1716	case NOTE_INSN_BASIC_BLOCK:
1717#ifdef TARGET_UNWIND_INFO
1718	  targetm.asm_out.unwind_emit (asm_out_file, insn);
1719#endif
1720
1721	  if (flag_debug_asm)
1722	    fprintf (asm_out_file, "\t%s basic block %d\n",
1723		     ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1724
1725	  if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1726	    {
1727	      *seen |= SEEN_EMITTED;
1728	      force_source_line = true;
1729	    }
1730	  else
1731	    *seen |= SEEN_BB;
1732
1733	  break;
1734
1735	case NOTE_INSN_EH_REGION_BEG:
1736	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1737				  NOTE_EH_HANDLER (insn));
1738	  break;
1739
1740	case NOTE_INSN_EH_REGION_END:
1741	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1742				  NOTE_EH_HANDLER (insn));
1743	  break;
1744
1745	case NOTE_INSN_PROLOGUE_END:
1746	  targetm.asm_out.function_end_prologue (file);
1747	  profile_after_prologue (file);
1748
1749	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1750	    {
1751	      *seen |= SEEN_EMITTED;
1752	      force_source_line = true;
1753	    }
1754	  else
1755	    *seen |= SEEN_NOTE;
1756
1757	  break;
1758
1759	case NOTE_INSN_EPILOGUE_BEG:
1760	  targetm.asm_out.function_begin_epilogue (file);
1761	  break;
1762
1763	case NOTE_INSN_FUNCTION_BEG:
1764	  app_disable ();
1765	  (*debug_hooks->end_prologue) (last_linenum, last_filename);
1766
1767	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1768	    {
1769	      *seen |= SEEN_EMITTED;
1770	      force_source_line = true;
1771	    }
1772	  else
1773	    *seen |= SEEN_NOTE;
1774
1775	  break;
1776
1777	case NOTE_INSN_BLOCK_BEG:
1778	  if (debug_info_level == DINFO_LEVEL_NORMAL
1779	      || debug_info_level == DINFO_LEVEL_VERBOSE
1780	      || write_symbols == DWARF2_DEBUG
1781	      || write_symbols == VMS_AND_DWARF2_DEBUG
1782	      || write_symbols == VMS_DEBUG)
1783	    {
1784	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1785
1786	      app_disable ();
1787	      ++block_depth;
1788	      high_block_linenum = last_linenum;
1789
1790	      /* Output debugging info about the symbol-block beginning.  */
1791	      (*debug_hooks->begin_block) (last_linenum, n);
1792
1793	      /* Mark this block as output.  */
1794	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1795	    }
1796	  break;
1797
1798	case NOTE_INSN_BLOCK_END:
1799	  if (debug_info_level == DINFO_LEVEL_NORMAL
1800	      || debug_info_level == DINFO_LEVEL_VERBOSE
1801	      || write_symbols == DWARF2_DEBUG
1802	      || write_symbols == VMS_AND_DWARF2_DEBUG
1803	      || write_symbols == VMS_DEBUG)
1804	    {
1805	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1806
1807	      app_disable ();
1808
1809	      /* End of a symbol-block.  */
1810	      --block_depth;
1811	      gcc_assert (block_depth >= 0);
1812
1813	      (*debug_hooks->end_block) (high_block_linenum, n);
1814	    }
1815	  break;
1816
1817	case NOTE_INSN_DELETED_LABEL:
1818	  /* Emit the label.  We may have deleted the CODE_LABEL because
1819	     the label could be proved to be unreachable, though still
1820	     referenced (in the form of having its address taken).  */
1821	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1822	  break;
1823
1824	case NOTE_INSN_VAR_LOCATION:
1825	  (*debug_hooks->var_location) (insn);
1826	  break;
1827
1828	case 0:
1829	  break;
1830
1831	default:
1832	  gcc_assert (NOTE_LINE_NUMBER (insn) > 0);
1833	  break;
1834	}
1835      break;
1836
1837    case BARRIER:
1838#if defined (DWARF2_UNWIND_INFO)
1839      if (dwarf2out_do_frame ())
1840	dwarf2out_frame_debug (insn, false);
1841#endif
1842      break;
1843
1844    case CODE_LABEL:
1845      /* The target port might emit labels in the output function for
1846	 some insn, e.g. sh.c output_branchy_insn.  */
1847/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
1848      {
1849	int align = LABEL_ALIGN_LOG (insn);
1850#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1851	int max_skip = LABEL_MAX_SKIP (insn);
1852#endif
1853
1854	if (align && NEXT_INSN (insn))
1855	  {
1856#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1857	    ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1858#else
1859#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1860	    ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1861#else
1862	    ASM_OUTPUT_ALIGN (file, align);
1863#endif
1864#endif
1865	  }
1866      }
1867/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
1868#ifdef HAVE_cc0
1869      CC_STATUS_INIT;
1870      /* If this label is reached from only one place, set the condition
1871	 codes from the instruction just before the branch.  */
1872
1873      /* Disabled because some insns set cc_status in the C output code
1874	 and NOTICE_UPDATE_CC alone can set incorrect status.  */
1875      if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1876	{
1877	  rtx jump = LABEL_REFS (insn);
1878	  rtx barrier = prev_nonnote_insn (insn);
1879	  rtx prev;
1880	  /* If the LABEL_REFS field of this label has been set to point
1881	     at a branch, the predecessor of the branch is a regular
1882	     insn, and that branch is the only way to reach this label,
1883	     set the condition codes based on the branch and its
1884	     predecessor.  */
1885	  if (barrier && BARRIER_P (barrier)
1886	      && jump && JUMP_P (jump)
1887	      && (prev = prev_nonnote_insn (jump))
1888	      && NONJUMP_INSN_P (prev))
1889	    {
1890	      NOTICE_UPDATE_CC (PATTERN (prev), prev);
1891	      NOTICE_UPDATE_CC (PATTERN (jump), jump);
1892	    }
1893	}
1894#endif
1895
1896      if (LABEL_NAME (insn))
1897	(*debug_hooks->label) (insn);
1898
1899      if (app_on)
1900	{
1901	  fputs (ASM_APP_OFF, file);
1902	  app_on = 0;
1903	}
1904
1905      next = next_nonnote_insn (insn);
1906      if (next != 0 && JUMP_P (next))
1907	{
1908	  rtx nextbody = PATTERN (next);
1909
1910	  /* If this label is followed by a jump-table,
1911	     make sure we put the label in the read-only section.  Also
1912	     possibly write the label and jump table together.  */
1913
1914	  if (GET_CODE (nextbody) == ADDR_VEC
1915	      || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1916	    {
1917#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1918	      /* In this case, the case vector is being moved by the
1919		 target, so don't output the label at all.  Leave that
1920		 to the back end macros.  */
1921#else
1922	      if (! JUMP_TABLES_IN_TEXT_SECTION)
1923		{
1924		  int log_align;
1925
1926		  switch_to_section (targetm.asm_out.function_rodata_section
1927				     (current_function_decl));
1928
1929#ifdef ADDR_VEC_ALIGN
1930		  log_align = ADDR_VEC_ALIGN (next);
1931#else
1932		  log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1933#endif
1934		  ASM_OUTPUT_ALIGN (file, log_align);
1935		}
1936	      else
1937		switch_to_section (current_function_section ());
1938
1939#ifdef ASM_OUTPUT_CASE_LABEL
1940	      ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1941				     next);
1942#else
1943	      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1944#endif
1945#endif
1946	      break;
1947	    }
1948	}
1949      if (LABEL_ALT_ENTRY_P (insn))
1950	output_alternate_entry_point (file, insn);
1951      else
1952	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1953      break;
1954
1955    default:
1956      {
1957	rtx body = PATTERN (insn);
1958	int insn_code_number;
1959	const char *template;
1960
1961#ifdef HAVE_conditional_execution
1962	/* Reset this early so it is correct for ASM statements.  */
1963	current_insn_predicate = NULL_RTX;
1964#endif
1965	/* An INSN, JUMP_INSN or CALL_INSN.
1966	   First check for special kinds that recog doesn't recognize.  */
1967
1968	if (GET_CODE (body) == USE /* These are just declarations.  */
1969	    || GET_CODE (body) == CLOBBER)
1970	  break;
1971
1972#ifdef HAVE_cc0
1973	{
1974	  /* If there is a REG_CC_SETTER note on this insn, it means that
1975	     the setting of the condition code was done in the delay slot
1976	     of the insn that branched here.  So recover the cc status
1977	     from the insn that set it.  */
1978
1979	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1980	  if (note)
1981	    {
1982	      NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1983	      cc_prev_status = cc_status;
1984	    }
1985	}
1986#endif
1987
1988	/* Detect insns that are really jump-tables
1989	   and output them as such.  */
1990
1991	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1992	  {
1993#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1994	    int vlen, idx;
1995#endif
1996
1997	    if (! JUMP_TABLES_IN_TEXT_SECTION)
1998	      switch_to_section (targetm.asm_out.function_rodata_section
1999				 (current_function_decl));
2000	    else
2001	      switch_to_section (current_function_section ());
2002
2003	    if (app_on)
2004	      {
2005		fputs (ASM_APP_OFF, file);
2006		app_on = 0;
2007	      }
2008
2009#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2010	    if (GET_CODE (body) == ADDR_VEC)
2011	      {
2012#ifdef ASM_OUTPUT_ADDR_VEC
2013		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2014#else
2015		gcc_unreachable ();
2016#endif
2017	      }
2018	    else
2019	      {
2020#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2021		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2022#else
2023		gcc_unreachable ();
2024#endif
2025	      }
2026#else
2027	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2028	    for (idx = 0; idx < vlen; idx++)
2029	      {
2030		if (GET_CODE (body) == ADDR_VEC)
2031		  {
2032#ifdef ASM_OUTPUT_ADDR_VEC_ELT
2033		    ASM_OUTPUT_ADDR_VEC_ELT
2034		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2035#else
2036		    gcc_unreachable ();
2037#endif
2038		  }
2039		else
2040		  {
2041#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2042		    ASM_OUTPUT_ADDR_DIFF_ELT
2043		      (file,
2044		       body,
2045		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2046		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2047#else
2048		    gcc_unreachable ();
2049#endif
2050		  }
2051	      }
2052#ifdef ASM_OUTPUT_CASE_END
2053	    ASM_OUTPUT_CASE_END (file,
2054				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2055				 insn);
2056#endif
2057#endif
2058
2059	    switch_to_section (current_function_section ());
2060
2061	    break;
2062	  }
2063	/* Output this line note if it is the first or the last line
2064	   note in a row.  */
2065	if (notice_source_line (insn))
2066	  {
2067	    (*debug_hooks->source_line) (last_linenum, last_filename);
2068	  }
2069
2070	if (GET_CODE (body) == ASM_INPUT)
2071	  {
2072	    const char *string = XSTR (body, 0);
2073
2074	    /* There's no telling what that did to the condition codes.  */
2075	    CC_STATUS_INIT;
2076
2077	    if (string[0])
2078	      {
2079		if (! app_on)
2080		  {
2081		    fputs (ASM_APP_ON, file);
2082		    app_on = 1;
2083		  }
2084		fprintf (asm_out_file, "\t%s\n", string);
2085	      }
2086	    break;
2087	  }
2088
2089	/* Detect `asm' construct with operands.  */
2090	if (asm_noperands (body) >= 0)
2091	  {
2092	    unsigned int noperands = asm_noperands (body);
2093	    rtx *ops = alloca (noperands * sizeof (rtx));
2094	    const char *string;
2095
2096	    /* There's no telling what that did to the condition codes.  */
2097	    CC_STATUS_INIT;
2098
2099	    /* Get out the operand values.  */
2100	    string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2101	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2102	    insn_noperands = noperands;
2103	    this_is_asm_operands = insn;
2104
2105#ifdef FINAL_PRESCAN_INSN
2106	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2107#endif
2108
2109	    /* Output the insn using them.  */
2110	    if (string[0])
2111	      {
2112		if (! app_on)
2113		  {
2114		    fputs (ASM_APP_ON, file);
2115		    app_on = 1;
2116		  }
2117	        output_asm_insn (string, ops);
2118	      }
2119
2120	    this_is_asm_operands = 0;
2121	    break;
2122	  }
2123
2124	if (app_on)
2125	  {
2126	    fputs (ASM_APP_OFF, file);
2127	    app_on = 0;
2128	  }
2129
2130	if (GET_CODE (body) == SEQUENCE)
2131	  {
2132	    /* A delayed-branch sequence.  */
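	    /* Element 0 of the SEQUENCE is the branch or call insn that
	       needed the delay slots, and the remaining elements are the
	       insns filling those slots; that is why the loops below start
	       at index 1.  */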
2133	    int i;
2134
2135	    final_sequence = body;
2136
2137	    /* Record the delay slots' frame information before the branch.
2138	       This is needed for delayed calls: see execute_cfa_program().  */
2139#if defined (DWARF2_UNWIND_INFO)
2140	    if (dwarf2out_do_frame ())
2141	      for (i = 1; i < XVECLEN (body, 0); i++)
2142		dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2143#endif
2144
2145	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2146	       force the restoration of a comparison that was previously
2147	       thought unnecessary.  If that happens, cancel this sequence
2148	       and cause that insn to be restored.  */
2149
2150	    next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2151	    if (next != XVECEXP (body, 0, 1))
2152	      {
2153		final_sequence = 0;
2154		return next;
2155	      }
2156
2157	    for (i = 1; i < XVECLEN (body, 0); i++)
2158	      {
2159		rtx insn = XVECEXP (body, 0, i);
2160		rtx next = NEXT_INSN (insn);
2161		/* We loop in case any instruction in a delay slot gets
2162		   split.  */
2163		do
2164		  insn = final_scan_insn (insn, file, 0, 1, seen);
2165		while (insn != next);
2166	      }
2167#ifdef DBR_OUTPUT_SEQEND
2168	    DBR_OUTPUT_SEQEND (file);
2169#endif
2170	    final_sequence = 0;
2171
2172	    /* If the insn requiring the delay slot was a CALL_INSN, the
2173	       insns in the delay slot are actually executed before the
2174	       called function.  Hence we don't preserve any CC-setting
2175	       actions in these insns and the CC must be marked as being
2176	       clobbered by the function.  */
2177	    if (CALL_P (XVECEXP (body, 0, 0)))
2178	      {
2179		CC_STATUS_INIT;
2180	      }
2181	    break;
2182	  }
2183
2184	/* We have a real machine instruction as rtl.  */
2185
2186	body = PATTERN (insn);
2187
2188#ifdef HAVE_cc0
2189	set = single_set (insn);
2190
2191	/* Check for redundant test and compare instructions
2192	   (when the condition codes are already set up as desired).
2193	   This is done only when optimizing; if not optimizing,
2194	   it should be possible for the user to alter a variable
2195	   with the debugger in between statements
2196	   and the next statement should reexamine the variable
2197	   to compute the condition codes.  */
2198
2199	if (optimize)
2200	  {
2201	    if (set
2202		&& GET_CODE (SET_DEST (set)) == CC0
2203		&& insn != last_ignored_compare)
2204	      {
2205		if (GET_CODE (SET_SRC (set)) == SUBREG)
2206		  SET_SRC (set) = alter_subreg (&SET_SRC (set));
2207		else if (GET_CODE (SET_SRC (set)) == COMPARE)
2208		  {
2209		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2210		      XEXP (SET_SRC (set), 0)
2211			= alter_subreg (&XEXP (SET_SRC (set), 0));
2212		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2213		      XEXP (SET_SRC (set), 1)
2214			= alter_subreg (&XEXP (SET_SRC (set), 1));
2215		  }
2216		if ((cc_status.value1 != 0
2217		     && rtx_equal_p (SET_SRC (set), cc_status.value1))
2218		    || (cc_status.value2 != 0
2219			&& rtx_equal_p (SET_SRC (set), cc_status.value2)))
2220		  {
2221		    /* Don't delete insn if it has an addressing side-effect.  */
2222		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2223			/* or if anything in it is volatile.  */
2224			&& ! volatile_refs_p (PATTERN (insn)))
2225		      {
2226			/* We don't really delete the insn; just ignore it.  */
2227			last_ignored_compare = insn;
2228			break;
2229		      }
2230		  }
2231	      }
2232	  }
2233#endif
2234
2235#ifdef HAVE_cc0
2236	/* If this is a conditional branch, maybe modify it
2237	   if the cc's are in a nonstandard state
2238	   so that it accomplishes the same thing that it would
2239	   do straightforwardly if the cc's were set up normally.  */
2240
2241	if (cc_status.flags != 0
2242	    && JUMP_P (insn)
2243	    && GET_CODE (body) == SET
2244	    && SET_DEST (body) == pc_rtx
2245	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2246	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2247	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2248	  {
2249	    /* This function may alter the contents of its argument
2250	       and clear some of the cc_status.flags bits.
2251	       It may also return 1 meaning condition now always true
2252	       or -1 meaning condition now always false
2253	       or 2 meaning condition nontrivial but altered.  */
2254	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2255	    /* If condition now has fixed value, replace the IF_THEN_ELSE
2256	       with its then-operand or its else-operand.  */
2257	    if (result == 1)
2258	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
2259	    if (result == -1)
2260	      SET_SRC (body) = XEXP (SET_SRC (body), 2);
2261
2262	    /* The jump is now either unconditional or a no-op.
2263	       If it has become a no-op, don't try to output it.
2264	       (It would not be recognized.)  */
2265	    if (SET_SRC (body) == pc_rtx)
2266	      {
2267	        delete_insn (insn);
2268		break;
2269	      }
2270	    else if (GET_CODE (SET_SRC (body)) == RETURN)
2271	      /* Replace (set (pc) (return)) with (return).  */
2272	      PATTERN (insn) = body = SET_SRC (body);
2273
2274	    /* Rerecognize the instruction if it has changed.  */
2275	    if (result != 0)
2276	      INSN_CODE (insn) = -1;
2277	  }
2278
2279	/* Make same adjustments to instructions that examine the
2280	   condition codes without jumping and instructions that
2281	   handle conditional moves (if this machine has either one).  */
2282
2283	if (cc_status.flags != 0
2284	    && set != 0)
2285	  {
2286	    rtx cond_rtx, then_rtx, else_rtx;
2287
2288	    if (!JUMP_P (insn)
2289		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2290	      {
2291		cond_rtx = XEXP (SET_SRC (set), 0);
2292		then_rtx = XEXP (SET_SRC (set), 1);
2293		else_rtx = XEXP (SET_SRC (set), 2);
2294	      }
2295	    else
2296	      {
2297		cond_rtx = SET_SRC (set);
2298		then_rtx = const_true_rtx;
2299		else_rtx = const0_rtx;
2300	      }
2301
2302	    switch (GET_CODE (cond_rtx))
2303	      {
2304	      case GTU:
2305	      case GT:
2306	      case LTU:
2307	      case LT:
2308	      case GEU:
2309	      case GE:
2310	      case LEU:
2311	      case LE:
2312	      case EQ:
2313	      case NE:
2314		{
2315		  int result;
2316		  if (XEXP (cond_rtx, 0) != cc0_rtx)
2317		    break;
2318		  result = alter_cond (cond_rtx);
2319		  if (result == 1)
2320		    validate_change (insn, &SET_SRC (set), then_rtx, 0);
2321		  else if (result == -1)
2322		    validate_change (insn, &SET_SRC (set), else_rtx, 0);
2323		  else if (result == 2)
2324		    INSN_CODE (insn) = -1;
2325		  if (SET_DEST (set) == SET_SRC (set))
2326		    delete_insn (insn);
2327		}
2328		break;
2329
2330	      default:
2331		break;
2332	      }
2333	  }
2334
2335#endif
2336
2337#ifdef HAVE_peephole
2338	/* Do machine-specific peephole optimizations if desired.  */
2339
2340	if (optimize && !flag_no_peephole && !nopeepholes)
2341	  {
2342	    rtx next = peephole (insn);
2343	    /* When peepholing, if there were notes within the peephole,
2344	       emit them before the peephole.  */
2345	    if (next != 0 && next != NEXT_INSN (insn))
2346	      {
2347		rtx note, prev = PREV_INSN (insn);
2348
2349		for (note = NEXT_INSN (insn); note != next;
2350		     note = NEXT_INSN (note))
2351		  final_scan_insn (note, file, optimize, nopeepholes, seen);
2352
2353		/* Put the notes in the proper position for a later
2354		   rescan.  For example, the SH target can do this
2355		   when generating a far jump in a delayed branch
2356		   sequence.  */
2357		note = NEXT_INSN (insn);
2358		PREV_INSN (note) = prev;
2359		NEXT_INSN (prev) = note;
2360		NEXT_INSN (PREV_INSN (next)) = insn;
2361		PREV_INSN (insn) = PREV_INSN (next);
2362		NEXT_INSN (insn) = next;
2363		PREV_INSN (next) = insn;
2364	      }
2365
2366	    /* PEEPHOLE might have changed this.  */
2367	    body = PATTERN (insn);
2368	  }
2369#endif
2370
2371	/* Try to recognize the instruction.
2372	   If successful, verify that the operands satisfy the
2373	   constraints for the instruction.  Crash if they don't,
2374	   since `reload' should have changed them so that they do.  */
2375
2376	insn_code_number = recog_memoized (insn);
2377	cleanup_subreg_operands (insn);
2378
2379	/* Dump the insn in the assembly for debugging.  */
2380	if (flag_dump_rtl_in_asm)
2381	  {
2382	    print_rtx_head = ASM_COMMENT_START;
2383	    print_rtl_single (asm_out_file, insn);
2384	    print_rtx_head = "";
2385	  }
2386
2387	if (! constrain_operands_cached (1))
2388	  fatal_insn_not_found (insn);
2389
2390	/* Some target machines need to prescan each insn before
2391	   it is output.  */
2392
2393#ifdef FINAL_PRESCAN_INSN
2394	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2395#endif
2396
2397#ifdef HAVE_conditional_execution
2398	if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2399	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2400#endif
2401
2402#ifdef HAVE_cc0
2403	cc_prev_status = cc_status;
2404
2405	/* Update `cc_status' for this instruction.
2406	   The instruction's output routine may change it further.
2407	   If the output routine for a jump insn needs to depend
2408	   on the cc status, it should look at cc_prev_status.  */
2409
2410	NOTICE_UPDATE_CC (body, insn);
2411#endif
2412
2413	current_output_insn = debug_insn = insn;
2414
2415#if defined (DWARF2_UNWIND_INFO)
2416	if (CALL_P (insn) && dwarf2out_do_frame ())
2417	  dwarf2out_frame_debug (insn, false);
2418#endif
2419
2420	/* Find the proper template for this insn.  */
2421	template = get_insn_template (insn_code_number, insn);
2422
2423	/* If the C code returns 0, it means that it is a jump insn
2424	   which follows a deleted test insn, and that test insn
2425	   needs to be reinserted.  */
2426	if (template == 0)
2427	  {
2428	    rtx prev;
2429
2430	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2431
2432	    /* We have already processed the notes between the setter and
2433	       the user.  Make sure we don't process them again, this is
2434	       particularly important if one of the notes is a block
2435	       scope note or an EH note.  */
2436	    for (prev = insn;
2437		 prev != last_ignored_compare;
2438		 prev = PREV_INSN (prev))
2439	      {
2440		if (NOTE_P (prev))
2441		  delete_insn (prev);	/* Use delete_note.  */
2442	      }
2443
2444	    return prev;
2445	  }
2446
2447	/* If the template is the string "#", it means that this insn must
2448	   be split.  */
2449	if (template[0] == '#' && template[1] == '\0')
2450	  {
2451	    rtx new = try_split (body, insn, 0);
2452
2453	    /* If we didn't split the insn, go away.  */
2454	    if (new == insn && PATTERN (new) == body)
2455	      fatal_insn ("could not split insn", insn);
2456
2457#ifdef HAVE_ATTR_length
2458	    /* This instruction should have been split in shorten_branches,
2459	       to ensure that we would have valid length info for the
2460	       resulting split insns.  */
2461	    gcc_unreachable ();
2462#endif
2463
2464	    return new;
2465	  }
2466
2467#ifdef TARGET_UNWIND_INFO
2468	/* ??? This will put the directives in the wrong place if
2469	   get_insn_template outputs assembly directly.  However calling it
2470	   before get_insn_template breaks if the insn is split.  */
2471	targetm.asm_out.unwind_emit (asm_out_file, insn);
2472#endif
2473
2474	/* Output assembler code from the template.  */
2475	output_asm_insn (template, recog_data.operand);
2476
2477	/* If necessary, report the effect that the instruction has on
2478	   the unwind info.   We've already done this for delay slots
2479	   and call instructions.  */
2480#if defined (DWARF2_UNWIND_INFO)
2481	if (final_sequence == 0
2482#if !defined (HAVE_prologue)
2483	    && !ACCUMULATE_OUTGOING_ARGS
2484#endif
2485	    && dwarf2out_do_frame ())
2486	  dwarf2out_frame_debug (insn, true);
2487#endif
2488
2489	current_output_insn = debug_insn = 0;
2490      }
2491    }
2492  return NEXT_INSN (insn);
2493}
2494
2495/* Return whether a source line note needs to be emitted before INSN.  */
2496
2497static bool
2498notice_source_line (rtx insn)
2499{
2500  const char *filename = insn_file (insn);
2501  int linenum = insn_line (insn);
2502
2503  if (filename
2504      && (force_source_line
2505	  || filename != last_filename
2506	  || last_linenum != linenum))
2507    {
2508      force_source_line = false;
2509      last_filename = filename;
2510      last_linenum = linenum;
2511      high_block_linenum = MAX (last_linenum, high_block_linenum);
2512      high_function_linenum = MAX (last_linenum, high_function_linenum);
2513      return true;
2514    }
2515  return false;
2516}
2517
2518/* For each operand in INSN, simplify (subreg (reg)) so that it refers
2519   directly to the desired hard register.  */
2520
2521void
2522cleanup_subreg_operands (rtx insn)
2523{
2524  int i;
2525  extract_insn_cached (insn);
2526  for (i = 0; i < recog_data.n_operands; i++)
2527    {
2528      /* The following test cannot use recog_data.operand when testing
2529	 for a SUBREG: the underlying object might have been changed
2530	 already if we are inside a match_operator expression that
2531	 matches the else clause.  Instead we test the underlying
2532	 expression directly.  */
2533      if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2534	recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2535      else if (GET_CODE (recog_data.operand[i]) == PLUS
2536	       || GET_CODE (recog_data.operand[i]) == MULT
2537	       || MEM_P (recog_data.operand[i]))
2538	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2539    }
2540
2541  for (i = 0; i < recog_data.n_dups; i++)
2542    {
2543      if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2544	*recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2545      else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2546	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
2547	       || MEM_P (*recog_data.dup_loc[i]))
2548	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2549    }
2550}
2551
2552/* If X is a SUBREG, replace it with a REG or a MEM,
2553   based on the thing it is a subreg of.  */
2554
2555rtx
2556alter_subreg (rtx *xp)
2557{
2558  rtx x = *xp;
2559  rtx y = SUBREG_REG (x);
2560
2561  /* simplify_subreg does not remove subreg from volatile references.
2562     We are required to.  */
2563  if (MEM_P (y))
2564    {
2565      int offset = SUBREG_BYTE (x);
2566
2567      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2568	 contains 0 instead of the proper offset.  See simplify_subreg.  */
2569      if (offset == 0
2570	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2571        {
2572          int difference = GET_MODE_SIZE (GET_MODE (y))
2573			   - GET_MODE_SIZE (GET_MODE (x));
2574          if (WORDS_BIG_ENDIAN)
2575            offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2576          if (BYTES_BIG_ENDIAN)
2577            offset += difference % UNITS_PER_WORD;
2578        }
2579
2580      *xp = adjust_address (y, GET_MODE (x), offset);
2581    }
2582  else
2583    {
2584      rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2585				 SUBREG_BYTE (x));
2586
2587      if (new != 0)
2588	*xp = new;
2589      else if (REG_P (y))
2590	{
2591	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
2592	  unsigned int regno = subreg_regno (x);
2593	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2594	}
2595    }
2596
2597  return *xp;
2598}
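
/* As an illustration (register numbers made up): after reload a
   (subreg:QI (reg:SI 1) 0) is replaced here by the corresponding QImode
   hard register, while a subreg of a MEM becomes a narrower MEM at the
   appropriately adjusted byte offset.  */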
2599
2600/* Do alter_subreg on all the SUBREGs contained in X.  */
2601
2602static rtx
2603walk_alter_subreg (rtx *xp)
2604{
2605  rtx x = *xp;
2606  switch (GET_CODE (x))
2607    {
2608    case PLUS:
2609    case MULT:
2610    case AND:
2611      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2612      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2613      break;
2614
2615    case MEM:
2616    case ZERO_EXTEND:
2617      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2618      break;
2619
2620    case SUBREG:
2621      return alter_subreg (xp);
2622
2623    default:
2624      break;
2625    }
2626
2627  return *xp;
2628}
2629
2630#ifdef HAVE_cc0
2631
2632/* Given BODY, the body of a jump instruction, alter the jump condition
2633   as required by the bits that are set in cc_status.flags.
2634   Not all of the bits there can be handled at this level in all cases.
2635
2636   The value is normally 0.
2637   1 means that the condition has become always true.
2638   -1 means that the condition has become always false.
2639   2 means that COND has been altered.  */
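/* For instance, if the target's compare left cc_status.flags with
   CC_REVERSED set, a GT condition is rewritten as LT; if only
   CC_NOT_NEGATIVE is known, a GE test is reported as always true and the
   caller turns the conditional jump into an unconditional one.  The
   particular codes are only examples.  */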
2640
2641static int
2642alter_cond (rtx cond)
2643{
2644  int value = 0;
2645
2646  if (cc_status.flags & CC_REVERSED)
2647    {
2648      value = 2;
2649      PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2650    }
2651
2652  if (cc_status.flags & CC_INVERTED)
2653    {
2654      value = 2;
2655      PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2656    }
2657
2658  if (cc_status.flags & CC_NOT_POSITIVE)
2659    switch (GET_CODE (cond))
2660      {
2661      case LE:
2662      case LEU:
2663      case GEU:
2664	/* Jump becomes unconditional.  */
2665	return 1;
2666
2667      case GT:
2668      case GTU:
2669      case LTU:
2670	/* Jump becomes no-op.  */
2671	return -1;
2672
2673      case GE:
2674	PUT_CODE (cond, EQ);
2675	value = 2;
2676	break;
2677
2678      case LT:
2679	PUT_CODE (cond, NE);
2680	value = 2;
2681	break;
2682
2683      default:
2684	break;
2685      }
2686
2687  if (cc_status.flags & CC_NOT_NEGATIVE)
2688    switch (GET_CODE (cond))
2689      {
2690      case GE:
2691      case GEU:
2692	/* Jump becomes unconditional.  */
2693	return 1;
2694
2695      case LT:
2696      case LTU:
2697	/* Jump becomes no-op.  */
2698	return -1;
2699
2700      case LE:
2701      case LEU:
2702	PUT_CODE (cond, EQ);
2703	value = 2;
2704	break;
2705
2706      case GT:
2707      case GTU:
2708	PUT_CODE (cond, NE);
2709	value = 2;
2710	break;
2711
2712      default:
2713	break;
2714      }
2715
2716  if (cc_status.flags & CC_NO_OVERFLOW)
2717    switch (GET_CODE (cond))
2718      {
2719      case GEU:
2720	/* Jump becomes unconditional.  */
2721	return 1;
2722
2723      case LEU:
2724	PUT_CODE (cond, EQ);
2725	value = 2;
2726	break;
2727
2728      case GTU:
2729	PUT_CODE (cond, NE);
2730	value = 2;
2731	break;
2732
2733      case LTU:
2734	/* Jump becomes no-op.  */
2735	return -1;
2736
2737      default:
2738	break;
2739      }
2740
2741  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2742    switch (GET_CODE (cond))
2743      {
2744      default:
2745	gcc_unreachable ();
2746
2747      case NE:
2748	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2749	value = 2;
2750	break;
2751
2752      case EQ:
2753	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2754	value = 2;
2755	break;
2756      }
2757
2758  if (cc_status.flags & CC_NOT_SIGNED)
2759    /* The flags are valid if signed condition operators are converted
2760       to unsigned.  */
2761    switch (GET_CODE (cond))
2762      {
2763      case LE:
2764	PUT_CODE (cond, LEU);
2765	value = 2;
2766	break;
2767
2768      case LT:
2769	PUT_CODE (cond, LTU);
2770	value = 2;
2771	break;
2772
2773      case GT:
2774	PUT_CODE (cond, GTU);
2775	value = 2;
2776	break;
2777
2778      case GE:
2779	PUT_CODE (cond, GEU);
2780	value = 2;
2781	break;
2782
2783      default:
2784	break;
2785      }
2786
2787  return value;
2788}
2789#endif
2790
2791/* Report inconsistency between the assembler template and the operands.
2792   In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
2793
2794void
2795output_operand_lossage (const char *cmsgid, ...)
2796{
2797  char *fmt_string;
2798  char *new_message;
2799  const char *pfx_str;
2800  va_list ap;
2801
2802  va_start (ap, cmsgid);
2803
2804  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
2805  asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
2806  vasprintf (&new_message, fmt_string, ap);
2807
2808  if (this_is_asm_operands)
2809    error_for_asm (this_is_asm_operands, "%s", new_message);
2810  else
2811    internal_error ("%s", new_message);
2812
2813  free (fmt_string);
2814  free (new_message);
2815  va_end (ap);
2816}
2817
2818/* Output of assembler code from a template, and its subroutines.  */
2819
2820/* Annotate the assembly with a comment describing the pattern and
2821   alternative used.  */
2822
2823static void
2824output_asm_name (void)
2825{
2826  if (debug_insn)
2827    {
2828      int num = INSN_CODE (debug_insn);
2829      fprintf (asm_out_file, "\t%s %d\t%s",
2830	       ASM_COMMENT_START, INSN_UID (debug_insn),
2831	       insn_data[num].name);
2832      if (insn_data[num].n_alternatives > 1)
2833	fprintf (asm_out_file, "/%d", which_alternative + 1);
2834#ifdef HAVE_ATTR_length
2835      fprintf (asm_out_file, "\t[length = %d]",
2836	       get_attr_length (debug_insn));
2837#endif
2838      /* Clear this so only the first assembler insn
2839	 of any rtl insn will get the special comment for -dp.  */
2840      debug_insn = 0;
2841    }
2842}
2843
2844/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2845   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
2846   corresponds to the address of the object and 0 if to the object.  */
2847
2848static tree
2849get_mem_expr_from_op (rtx op, int *paddressp)
2850{
2851  tree expr;
2852  int inner_addressp;
2853
2854  *paddressp = 0;
2855
2856  if (REG_P (op))
2857    return REG_EXPR (op);
2858  else if (!MEM_P (op))
2859    return 0;
2860
2861  if (MEM_EXPR (op) != 0)
2862    return MEM_EXPR (op);
2863
2864  /* Otherwise we have an address, so indicate it and look at the address.  */
2865  *paddressp = 1;
2866  op = XEXP (op, 0);
2867
2868  /* First check if we have a decl for the address, then look at the right side
2869     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
2870     But don't allow the address to itself be indirect.  */
2871  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2872    return expr;
2873  else if (GET_CODE (op) == PLUS
2874	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2875    return expr;
2876
2877  while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
2878	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
2879    op = XEXP (op, 0);
2880
2881  expr = get_mem_expr_from_op (op, &inner_addressp);
2882  return inner_addressp ? 0 : expr;
2883}
2884
2885/* Output operand names for assembler instructions.  OPERANDS is the
2886   operand vector, OPORDER is the order to write the operands, and NOPS
2887   is the number of operands to write.  */
2888
2889static void
2890output_asm_operand_names (rtx *operands, int *oporder, int nops)
2891{
2892  int wrote = 0;
2893  int i;
2894
2895  for (i = 0; i < nops; i++)
2896    {
2897      int addressp;
2898      rtx op = operands[oporder[i]];
2899      tree expr = get_mem_expr_from_op (op, &addressp);
2900
2901      fprintf (asm_out_file, "%c%s",
2902	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2903      wrote = 1;
2904      if (expr)
2905	{
2906	  fprintf (asm_out_file, "%s",
2907		   addressp ? "*" : "");
2908	  print_mem_expr (asm_out_file, expr);
2909	  wrote = 1;
2910	}
2911      else if (REG_P (op) && ORIGINAL_REGNO (op)
2912	       && ORIGINAL_REGNO (op) != REGNO (op))
2913	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2914    }
2915}
2916
2917/* Output text from TEMPLATE to the assembler output file,
2918   obeying %-directions to substitute operands taken from
2919   the vector OPERANDS.
2920
2921   %N (for N a digit) means print operand N in usual manner.
2922   %lN means require operand N to be a CODE_LABEL or LABEL_REF
2923      and print the label name with no punctuation.
2924   %cN means require operand N to be a constant
2925      and print the constant expression with no punctuation.
2926   %aN means expect operand N to be a memory address
2927      (not a memory reference!) and print a reference
2928      to that address.
2929   %nN means expect operand N to be a constant
2930      and print a constant expression for minus the value
2931      of the operand, with no other punctuation.  */
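/* As an illustration with made-up operands, a template such as
   "cmp %0,%1\n\tbeq %l2" prints operands 0 and 1 in the usual way and
   operand 2 as a bare label name, while "%n3" would print the negated
   value of a constant operand 3.  */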
2932
2933void
2934output_asm_insn (const char *template, rtx *operands)
2935{
2936  const char *p;
2937  int c;
2938#ifdef ASSEMBLER_DIALECT
2939  int dialect = 0;
2940#endif
2941  int oporder[MAX_RECOG_OPERANDS];
2942  char opoutput[MAX_RECOG_OPERANDS];
2943  int ops = 0;
2944
2945  /* An insn may return a null string template
2946     in a case where no assembler code is needed.  */
2947  if (*template == 0)
2948    return;
2949
2950  memset (opoutput, 0, sizeof opoutput);
2951  p = template;
2952  putc ('\t', asm_out_file);
2953
2954#ifdef ASM_OUTPUT_OPCODE
2955  ASM_OUTPUT_OPCODE (asm_out_file, p);
2956#endif
2957
2958  while ((c = *p++))
2959    switch (c)
2960      {
2961      case '\n':
2962	if (flag_verbose_asm)
2963	  output_asm_operand_names (operands, oporder, ops);
2964	if (flag_print_asm_name)
2965	  output_asm_name ();
2966
2967	ops = 0;
2968	memset (opoutput, 0, sizeof opoutput);
2969
2970	putc (c, asm_out_file);
2971#ifdef ASM_OUTPUT_OPCODE
2972	while ((c = *p) == '\t')
2973	  {
2974	    putc (c, asm_out_file);
2975	    p++;
2976	  }
2977	ASM_OUTPUT_OPCODE (asm_out_file, p);
2978#endif
2979	break;
2980
2981#ifdef ASSEMBLER_DIALECT
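      /* A dialect alternative is written "{one|two}" in a template; only
	 the piece selected by dialect_number is emitted, so with the second
	 dialect selected "{foo|bar}" produces "bar".  The fragment names are
	 made up for illustration.  */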
2982      case '{':
2983	{
2984	  int i;
2985
2986	  if (dialect)
2987	    output_operand_lossage ("nested assembly dialect alternatives");
2988	  else
2989	    dialect = 1;
2990
2991	  /* If we want the first dialect, do nothing.  Otherwise, skip
2992	     DIALECT_NUMBER of strings ending with '|'.  */
2993	  for (i = 0; i < dialect_number; i++)
2994	    {
2995	      while (*p && *p != '}' && *p++ != '|')
2996		;
2997	      if (*p == '}')
2998		break;
2999	      if (*p == '|')
3000		p++;
3001	    }
3002
3003	  if (*p == '\0')
3004	    output_operand_lossage ("unterminated assembly dialect alternative");
3005	}
3006	break;
3007
3008      case '|':
3009	if (dialect)
3010	  {
3011	    /* Skip to close brace.  */
3012	    do
3013	      {
3014		if (*p == '\0')
3015		  {
3016		    output_operand_lossage ("unterminated assembly dialect alternative");
3017		    break;
3018		  }
3019	      }
3020	    while (*p++ != '}');
3021	    dialect = 0;
3022	  }
3023	else
3024	  putc (c, asm_out_file);
3025	break;
3026
3027      case '}':
3028	if (! dialect)
3029	  putc (c, asm_out_file);
3030	dialect = 0;
3031	break;
3032#endif
3033
3034      case '%':
3035	/* %% outputs a single %.  */
3036	if (*p == '%')
3037	  {
3038	    p++;
3039	    putc (c, asm_out_file);
3040	  }
3041	/* %= outputs a number which is unique to each insn in the entire
3042	   compilation.  This is useful for making local labels that are
3043	   referred to more than once in a given insn.  */
3044	else if (*p == '=')
3045	  {
3046	    p++;
3047	    fprintf (asm_out_file, "%d", insn_counter);
3048	  }
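	/* A template can therefore write something like "loop%=:" to get a
	   label that is unique to this insn; the "loop" stem is arbitrary
	   and only for illustration.  */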
3049	/* % followed by a letter and some digits
3050	   outputs an operand in a special way depending on the letter.
3051	   Letters `acln' are implemented directly.
3052	   Other letters are passed to `output_operand' so that
3053	   the PRINT_OPERAND macro can define them.  */
3054	else if (ISALPHA (*p))
3055	  {
3056	    int letter = *p++;
3057	    unsigned long opnum;
3058	    char *endptr;
3059
3060	    opnum = strtoul (p, &endptr, 10);
3061
3062	    if (endptr == p)
3063	      output_operand_lossage ("operand number missing "
3064				      "after %%-letter");
3065	    else if (this_is_asm_operands && opnum >= insn_noperands)
3066	      output_operand_lossage ("operand number out of range");
3067	    else if (letter == 'l')
3068	      output_asm_label (operands[opnum]);
3069	    else if (letter == 'a')
3070	      output_address (operands[opnum]);
3071	    else if (letter == 'c')
3072	      {
3073		if (CONSTANT_ADDRESS_P (operands[opnum]))
3074		  output_addr_const (asm_out_file, operands[opnum]);
3075		else
3076		  output_operand (operands[opnum], 'c');
3077	      }
3078	    else if (letter == 'n')
3079	      {
3080		if (GET_CODE (operands[opnum]) == CONST_INT)
3081		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3082			   - INTVAL (operands[opnum]));
3083		else
3084		  {
3085		    putc ('-', asm_out_file);
3086		    output_addr_const (asm_out_file, operands[opnum]);
3087		  }
3088	      }
3089	    else
3090	      output_operand (operands[opnum], letter);
3091
3092	    if (!opoutput[opnum])
3093	      oporder[ops++] = opnum;
3094	    opoutput[opnum] = 1;
3095
3096	    p = endptr;
3097	    c = *p;
3098	  }
3099	/* % followed by a digit outputs an operand the default way.  */
3100	else if (ISDIGIT (*p))
3101	  {
3102	    unsigned long opnum;
3103	    char *endptr;
3104
3105	    opnum = strtoul (p, &endptr, 10);
3106	    if (this_is_asm_operands && opnum >= insn_noperands)
3107	      output_operand_lossage ("operand number out of range");
3108	    else
3109	      output_operand (operands[opnum], 0);
3110
3111	    if (!opoutput[opnum])
3112	      oporder[ops++] = opnum;
3113	    opoutput[opnum] = 1;
3114
3115	    p = endptr;
3116	    c = *p;
3117	  }
3118	/* % followed by punctuation: output something for that
3119	   punctuation character alone, with no operand.
3120	   The PRINT_OPERAND macro decides what is actually done.  */
3121#ifdef PRINT_OPERAND_PUNCT_VALID_P
3122	else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3123	  output_operand (NULL_RTX, *p++);
3124#endif
3125	else
3126	  output_operand_lossage ("invalid %%-code");
3127	break;
3128
3129      default:
3130	putc (c, asm_out_file);
3131      }
3132
3133  /* Write out the variable names for operands, if we know them.  */
3134  if (flag_verbose_asm)
3135    output_asm_operand_names (operands, oporder, ops);
3136  if (flag_print_asm_name)
3137    output_asm_name ();
3138
3139  putc ('\n', asm_out_file);
3140}
3141
3142/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
3143
3144void
3145output_asm_label (rtx x)
3146{
3147  char buf[256];
3148
3149  if (GET_CODE (x) == LABEL_REF)
3150    x = XEXP (x, 0);
3151  if (LABEL_P (x)
3152      || (NOTE_P (x)
3153	  && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3154    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3155  else
3156    output_operand_lossage ("'%%l' operand isn't a label");
3157
3158  assemble_name (asm_out_file, buf);
3159}
3160
3161/* Print operand X using machine-dependent assembler syntax.
3162   The macro PRINT_OPERAND is defined just to control this function.
3163   CODE is a non-digit that preceded the operand-number in the % spec,
3164   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
3165   between the % and the digits.
3166   When CODE is a non-letter, X is 0.
3167
3168   The meanings of the letters are machine-dependent and controlled
3169   by PRINT_OPERAND.  */
3170
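/* For instance, if an output template contains "%z3", the ISALPHA branch
   of output_asm_insn above ends up calling

     output_operand (operands[3], 'z');

   and the target's PRINT_OPERAND macro sees CODE == 'z'.  (The letter 'z'
   is only an example; what, if anything, it means is up to the target.)  */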
3171static void
3172output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3173{
3174  if (x && GET_CODE (x) == SUBREG)
3175    x = alter_subreg (&x);
3176
3177  /* X must not be a pseudo reg.  */
3178  gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3179
3180  PRINT_OPERAND (asm_out_file, x, code);
3181}
3182
3183/* Print a memory reference operand for address X
3184   using machine-dependent assembler syntax.
3185   The macro PRINT_OPERAND_ADDRESS exists just to control this function.  */
3186
3187void
3188output_address (rtx x)
3189{
3190  walk_alter_subreg (&x);
3191  PRINT_OPERAND_ADDRESS (asm_out_file, x);
3192}
3193
3194/* Print an integer constant expression in assembler syntax.
3195   Addition and subtraction are the only arithmetic
3196   that may appear in these expressions.  */
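/* A small illustration (exact spelling depends on the target's syntax):
   (const_int 42) prints as "42", while
   (const (plus (symbol_ref "foo") (const_int 4))) prints the symbol's
   assembler name followed by "+4", e.g. "foo+4" on a typical ELF target.  */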
3197
3198void
3199output_addr_const (FILE *file, rtx x)
3200{
3201  char buf[256];
3202
3203 restart:
3204  switch (GET_CODE (x))
3205    {
3206    case PC:
3207      putc ('.', file);
3208      break;
3209
3210    case SYMBOL_REF:
3211      if (SYMBOL_REF_DECL (x))
3212	mark_decl_referenced (SYMBOL_REF_DECL (x));
3213#ifdef ASM_OUTPUT_SYMBOL_REF
3214      ASM_OUTPUT_SYMBOL_REF (file, x);
3215#else
3216      assemble_name (file, XSTR (x, 0));
3217#endif
3218      break;
3219
3220    case LABEL_REF:
3221      x = XEXP (x, 0);
3222      /* Fall through.  */
3223    case CODE_LABEL:
3224      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3225#ifdef ASM_OUTPUT_LABEL_REF
3226      ASM_OUTPUT_LABEL_REF (file, buf);
3227#else
3228      assemble_name (file, buf);
3229#endif
3230      break;
3231
3232    case CONST_INT:
3233      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3234      break;
3235
3236    case CONST:
3237      /* This used to output parentheses around the expression,
3238	 but that does not work on the 386 (either ATT or BSD assembler).  */
3239      output_addr_const (file, XEXP (x, 0));
3240      break;
3241
3242    case CONST_DOUBLE:
3243      if (GET_MODE (x) == VOIDmode)
3244	{
3245	  /* We can use %d if the number is one word and positive.  */
3246	  if (CONST_DOUBLE_HIGH (x))
3247	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3248		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3249	  else if (CONST_DOUBLE_LOW (x) < 0)
3250	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3251	  else
3252	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3253	}
3254      else
3255	/* We can't handle floating point constants;
3256	   PRINT_OPERAND must handle them.  */
3257	output_operand_lossage ("floating constant misused");
3258      break;
3259
3260    case PLUS:
3261      /* Some assemblers need integer constants to appear last (e.g. MASM).  */
3262      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3263	{
3264	  output_addr_const (file, XEXP (x, 1));
3265	  if (INTVAL (XEXP (x, 0)) >= 0)
3266	    fprintf (file, "+");
3267	  output_addr_const (file, XEXP (x, 0));
3268	}
3269      else
3270	{
3271	  output_addr_const (file, XEXP (x, 0));
3272	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
3273	      || INTVAL (XEXP (x, 1)) >= 0)
3274	    fprintf (file, "+");
3275	  output_addr_const (file, XEXP (x, 1));
3276	}
3277      break;
3278
3279    case MINUS:
3280      /* Avoid outputting things like x-x or x+5-x,
3281	 since some assemblers can't handle that.  */
3282      x = simplify_subtraction (x);
3283      if (GET_CODE (x) != MINUS)
3284	goto restart;
3285
3286      output_addr_const (file, XEXP (x, 0));
3287      fprintf (file, "-");
3288      if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3289	  || GET_CODE (XEXP (x, 1)) == PC
3290	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3291	output_addr_const (file, XEXP (x, 1));
3292      else
3293	{
3294	  fputs (targetm.asm_out.open_paren, file);
3295	  output_addr_const (file, XEXP (x, 1));
3296	  fputs (targetm.asm_out.close_paren, file);
3297	}
3298      break;
3299
3300    case ZERO_EXTEND:
3301    case SIGN_EXTEND:
3302    case SUBREG:
3303      output_addr_const (file, XEXP (x, 0));
3304      break;
3305
3306    default:
3307#ifdef OUTPUT_ADDR_CONST_EXTRA
3308      OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3309      break;
3310
3311    fail:
3312#endif
3313      output_operand_lossage ("invalid expression as operand");
3314    }
3315}
3316
3317/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3318   %R prints the value of REGISTER_PREFIX.
3319   %L prints the value of LOCAL_LABEL_PREFIX.
3320   %U prints the value of USER_LABEL_PREFIX.
3321   %I prints the value of IMMEDIATE_PREFIX.
3322   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3323   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3324
3325   We handle alternate assembler dialects here, just like output_asm_insn.  */
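/* Illustrative use only (the mnemonic, prefixes, and the variable `delta'
   are hypothetical, not taken from any real back end):

     asm_fprintf (asm_out_file, "\taddl\t%I%wd, %Rsp\n",
                  (HOST_WIDE_INT) delta);

   On a target whose IMMEDIATE_PREFIX is "$" and REGISTER_PREFIX is "%",
   and with delta == 16, this would print something like "addl $16, %sp".  */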
3326
3327void
3328asm_fprintf (FILE *file, const char *p, ...)
3329{
3330  char buf[10];
3331  char *q, c;
3332  va_list argptr;
3333
3334  va_start (argptr, p);
3335
3336  buf[0] = '%';
3337
3338  while ((c = *p++))
3339    switch (c)
3340      {
3341#ifdef ASSEMBLER_DIALECT
3342      case '{':
3343	{
3344	  int i;
3345
3346	  /* If we want the first dialect, do nothing.  Otherwise, skip
3347	     DIALECT_NUMBER of strings ending with '|'.  */
3348	  for (i = 0; i < dialect_number; i++)
3349	    {
3350	      while (*p && *p++ != '|')
3351		;
3352
3353	      if (*p == '|')
3354		p++;
3355	    }
3356	}
3357	break;
3358
3359      case '|':
3360	/* Skip to close brace.  */
3361	while (*p && *p++ != '}')
3362	  ;
3363	break;
3364
3365      case '}':
3366	break;
3367#endif
3368
3369      case '%':
3370	c = *p++;
3371	q = &buf[1];
3372	while (strchr ("-+ #0", c))
3373	  {
3374	    *q++ = c;
3375	    c = *p++;
3376	  }
3377	while (ISDIGIT (c) || c == '.')
3378	  {
3379	    *q++ = c;
3380	    c = *p++;
3381	  }
3382	switch (c)
3383	  {
3384	  case '%':
3385	    putc ('%', file);
3386	    break;
3387
3388	  case 'd':  case 'i':  case 'u':
3389	  case 'x':  case 'X':  case 'o':
3390	  case 'c':
3391	    *q++ = c;
3392	    *q = 0;
3393	    fprintf (file, buf, va_arg (argptr, int));
3394	    break;
3395
3396	  case 'w':
3397	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3398	       'o' cases, but we do not check for those cases.  It
3399	       means that the value is a HOST_WIDE_INT, which may be
3400	       either `long' or `long long'.  */
3401	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3402	    q += strlen (HOST_WIDE_INT_PRINT);
3403	    *q++ = *p++;
3404	    *q = 0;
3405	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3406	    break;
3407
3408	  case 'l':
3409	    *q++ = c;
3410#ifdef HAVE_LONG_LONG
3411	    if (*p == 'l')
3412	      {
3413		*q++ = *p++;
3414		*q++ = *p++;
3415		*q = 0;
3416		fprintf (file, buf, va_arg (argptr, long long));
3417	      }
3418	    else
3419#endif
3420	      {
3421		*q++ = *p++;
3422		*q = 0;
3423		fprintf (file, buf, va_arg (argptr, long));
3424	      }
3425
3426	    break;
3427
3428	  case 's':
3429	    *q++ = c;
3430	    *q = 0;
3431	    fprintf (file, buf, va_arg (argptr, char *));
3432	    break;
3433
3434	  case 'O':
3435#ifdef ASM_OUTPUT_OPCODE
3436	    ASM_OUTPUT_OPCODE (asm_out_file, p);
3437#endif
3438	    break;
3439
3440	  case 'R':
3441#ifdef REGISTER_PREFIX
3442	    fprintf (file, "%s", REGISTER_PREFIX);
3443#endif
3444	    break;
3445
3446	  case 'I':
3447#ifdef IMMEDIATE_PREFIX
3448	    fprintf (file, "%s", IMMEDIATE_PREFIX);
3449#endif
3450	    break;
3451
3452	  case 'L':
3453#ifdef LOCAL_LABEL_PREFIX
3454	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3455#endif
3456	    break;
3457
3458	  case 'U':
3459	    fputs (user_label_prefix, file);
3460	    break;
3461
3462#ifdef ASM_FPRINTF_EXTENSIONS
3463	    /* Uppercase letters are reserved for general use by asm_fprintf
3464	       and so are not available to target-specific code.  To prevent
3465	       the ASM_FPRINTF_EXTENSIONS macro from using them, they are
3466	       handled here.  As they get turned into real extensions to
3467	       asm_fprintf they should be removed from this list.  */
3468	  case 'A': case 'B': case 'C': case 'D': case 'E':
3469	  case 'F': case 'G': case 'H': case 'J': case 'K':
3470	  case 'M': case 'N': case 'P': case 'Q': case 'S':
3471	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
3472	    break;
3473
3474	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3475#endif
3476	  default:
3477	    gcc_unreachable ();
3478	  }
3479	break;
3480
3481      default:
3482	putc (c, file);
3483      }
3484  va_end (argptr);
3485}
3486
3487/* Split up a CONST_DOUBLE or integer constant rtx
3488   into two rtx's for single words,
3489   storing in *FIRST the word that comes first in memory in the target
3490   and in *SECOND the other.  */
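/* A worked example, assuming 32-bit words, !WORDS_BIG_ENDIAN and a 64-bit
   HOST_WIDE_INT: splitting (const_int 0x123456789abcdef0) gives

     *first  : CONST_INT with value 0xffffffff9abcdef0  (low word, sign-extended)
     *second : CONST_INT with value 0x12345678          (high word)

   On a WORDS_BIG_ENDIAN target the two results are swapped.  */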
3491
3492void
3493split_double (rtx value, rtx *first, rtx *second)
3494{
3495  if (GET_CODE (value) == CONST_INT)
3496    {
3497      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3498	{
3499	  /* In this case the CONST_INT holds both target words.
3500	     Extract the bits from it into two word-sized pieces.
3501	     Sign extend each half to HOST_WIDE_INT.  */
3502	  unsigned HOST_WIDE_INT low, high;
3503	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3504
3505	  /* Set sign_bit to the most significant bit of a word.  */
3506	  sign_bit = 1;
3507	  sign_bit <<= BITS_PER_WORD - 1;
3508
3509	  /* Set mask so that all bits of the word are set.  We could
3510	     have used 1 << BITS_PER_WORD instead of basing the
3511	     calculation on sign_bit.  However, on machines where
3512	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3513	     compiler warning, even though the code would never be
3514	     executed.  */
3515	  mask = sign_bit << 1;
3516	  mask--;
3517
3518	  /* Set sign_extend as any remaining bits.  */
3519	  sign_extend = ~mask;
3520
3521	  /* Pick the lower word and sign-extend it.  */
3522	  low = INTVAL (value);
3523	  low &= mask;
3524	  if (low & sign_bit)
3525	    low |= sign_extend;
3526
3527	  /* Pick the higher word, shifted to the least significant
3528	     bits, and sign-extend it.  */
3529	  high = INTVAL (value);
3530	  high >>= BITS_PER_WORD - 1;
3531	  high >>= 1;
3532	  high &= mask;
3533	  if (high & sign_bit)
3534	    high |= sign_extend;
3535
3536	  /* Store the words in the target machine order.  */
3537	  if (WORDS_BIG_ENDIAN)
3538	    {
3539	      *first = GEN_INT (high);
3540	      *second = GEN_INT (low);
3541	    }
3542	  else
3543	    {
3544	      *first = GEN_INT (low);
3545	      *second = GEN_INT (high);
3546	    }
3547	}
3548      else
3549	{
3550	  /* The rule for using CONST_INT for a wider mode
3551	     is that we regard the value as signed.
3552	     So sign-extend it.  */
3553	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3554	  if (WORDS_BIG_ENDIAN)
3555	    {
3556	      *first = high;
3557	      *second = value;
3558	    }
3559	  else
3560	    {
3561	      *first = value;
3562	      *second = high;
3563	    }
3564	}
3565    }
3566  else if (GET_CODE (value) != CONST_DOUBLE)
3567    {
3568      if (WORDS_BIG_ENDIAN)
3569	{
3570	  *first = const0_rtx;
3571	  *second = value;
3572	}
3573      else
3574	{
3575	  *first = value;
3576	  *second = const0_rtx;
3577	}
3578    }
3579  else if (GET_MODE (value) == VOIDmode
3580	   /* This is the old way we did CONST_DOUBLE integers.  */
3581	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3582    {
3583      /* In an integer, the words are defined as most and least significant.
3584	 So order them by the target's convention.  */
3585      if (WORDS_BIG_ENDIAN)
3586	{
3587	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3588	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
3589	}
3590      else
3591	{
3592	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
3593	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3594	}
3595    }
3596  else
3597    {
3598      REAL_VALUE_TYPE r;
3599      long l[2];
3600      REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3601
3602      /* Note, this converts the REAL_VALUE_TYPE to the target's
3603	 format, splits up the floating point double and outputs
3604	 exactly 32 bits of it into each of l[0] and l[1] --
3605	 not necessarily BITS_PER_WORD bits.  */
3606      REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3607
3608      /* If 32 bits is an entire word for the target, but not for the host,
3609	 then sign-extend on the host so that the number will look the same
3610	 way on the host that it would on the target.  See for instance
3611	 simplify_unary_operation.  The #if is needed to avoid compiler
3612	 warnings.  */
3613
3614#if HOST_BITS_PER_LONG > 32
3615      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3616	{
3617	  if (l[0] & ((long) 1 << 31))
3618	    l[0] |= ((long) (-1) << 32);
3619	  if (l[1] & ((long) 1 << 31))
3620	    l[1] |= ((long) (-1) << 32);
3621	}
3622#endif
3623
3624      *first = GEN_INT (l[0]);
3625      *second = GEN_INT (l[1]);
3626    }
3627}
3628
3629/* Return nonzero if this function has no function calls.  */
3630
3631int
3632leaf_function_p (void)
3633{
3634  rtx insn;
3635  rtx link;
3636
3637  if (current_function_profile || profile_arc_flag)
3638    return 0;
3639
3640  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3641    {
3642      if (CALL_P (insn)
3643	  && ! SIBLING_CALL_P (insn))
3644	return 0;
3645      if (NONJUMP_INSN_P (insn)
3646	  && GET_CODE (PATTERN (insn)) == SEQUENCE
3647	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3648	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3649	return 0;
3650    }
3651  for (link = current_function_epilogue_delay_list;
3652       link;
3653       link = XEXP (link, 1))
3654    {
3655      insn = XEXP (link, 0);
3656
3657      if (CALL_P (insn)
3658	  && ! SIBLING_CALL_P (insn))
3659	return 0;
3660      if (NONJUMP_INSN_P (insn)
3661	  && GET_CODE (PATTERN (insn)) == SEQUENCE
3662	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3663	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3664	return 0;
3665    }
3666
3667  return 1;
3668}
3669
3670	/* Return 1 if branch is a forward branch.
3671	   Uses the uid_shuid array (via INSN_SHUID), so it works only in the final
3672	   pass.  May be used by output templates to add customary branch
3673	   prediction hints.  */
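/* For instance, a conditional branch's output template might do something
   like (illustrative only, not taken from any particular back end):

     return final_forward_branch_p (insn) ? "beq,pn\t%1,%0" : "beq,pt\t%1,%0";

   to statically hint that forward branches are not taken.  */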
3674int
3675final_forward_branch_p (rtx insn)
3676{
3677  int insn_id, label_id;
3678
3679  gcc_assert (uid_shuid);
3680  insn_id = INSN_SHUID (insn);
3681  label_id = INSN_SHUID (JUMP_LABEL (insn));
3682	  /* We've hit some insns that do not have id information available.  */
3683  gcc_assert (insn_id && label_id);
3684  return insn_id < label_id;
3685}
3686
3687/* On some machines, a function with no call insns
3688   can run faster if it doesn't create its own register window.
3689   When output, the leaf function should use only the "output"
3690   registers.  Ordinarily, the function would be compiled to use
3691   the "input" registers to find its arguments; it is a candidate
3692   for leaf treatment if it uses only the "input" registers.
3693   Leaf function treatment means renumbering so the function
3694   uses the "output" registers instead.  */
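/* A sketch of the macros a target might provide (hypothetical names and
   values; the real definitions live in the target's <machine>.h):

     static const char my_leaf_regs[FIRST_PSEUDO_REGISTER]
       = { 1, 1, 1, 1, 0, 0, ... };        1 means usable in a leaf function
     #define LEAF_REGISTERS  my_leaf_regs
     #define LEAF_REG_REMAP(REGNO)  (my_leaf_reg_remap[(REGNO)])

   only_leaf_regs_used below consults the LEAF_REGISTERS flags, and
   leaf_renumber_regs_insn rewrites each hard REG through LEAF_REG_REMAP.  */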
3695
3696#ifdef LEAF_REGISTERS
3697
3698/* Return 1 if this function uses only the registers that can be
3699   safely renumbered.  */
3700
3701int
3702only_leaf_regs_used (void)
3703{
3704  int i;
3705  const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3706
3707  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3708    if ((regs_ever_live[i] || global_regs[i])
3709	&& ! permitted_reg_in_leaf_functions[i])
3710      return 0;
3711
3712  if (current_function_uses_pic_offset_table
3713      && pic_offset_table_rtx != 0
3714      && REG_P (pic_offset_table_rtx)
3715      && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3716    return 0;
3717
3718  return 1;
3719}
3720
3721/* Scan all instructions and renumber all registers into those
3722   available in leaf functions.  */
3723
3724static void
3725leaf_renumber_regs (rtx first)
3726{
3727  rtx insn;
3728
3729  /* Renumber only the actual patterns.
3730     The reg-notes can contain frame pointer refs,
3731     and renumbering them could crash, and should not be needed.  */
3732  for (insn = first; insn; insn = NEXT_INSN (insn))
3733    if (INSN_P (insn))
3734      leaf_renumber_regs_insn (PATTERN (insn));
3735  for (insn = current_function_epilogue_delay_list;
3736       insn;
3737       insn = XEXP (insn, 1))
3738    if (INSN_P (XEXP (insn, 0)))
3739      leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3740}
3741
3742/* Scan IN_RTX and its subexpressions, and renumber all regs into those
3743   available in leaf functions.  */
3744
3745void
3746leaf_renumber_regs_insn (rtx in_rtx)
3747{
3748  int i, j;
3749  const char *format_ptr;
3750
3751  if (in_rtx == 0)
3752    return;
3753
3754	  /* Renumber all input-registers into output-registers.
3755	     The used bit on a REG marks registers that have already been
3756	     renumbered, so no register is renumbered twice.  */
3757
3758  if (REG_P (in_rtx))
3759    {
3760      int newreg;
3761
3762      /* Don't renumber the same reg twice.  */
3763      if (in_rtx->used)
3764	return;
3765
3766      newreg = REGNO (in_rtx);
3767      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
3768	 to reach here as part of a REG_NOTE.  */
3769      if (newreg >= FIRST_PSEUDO_REGISTER)
3770	{
3771	  in_rtx->used = 1;
3772	  return;
3773	}
3774      newreg = LEAF_REG_REMAP (newreg);
3775      gcc_assert (newreg >= 0);
3776      regs_ever_live[REGNO (in_rtx)] = 0;
3777      regs_ever_live[newreg] = 1;
3778      REGNO (in_rtx) = newreg;
3779      in_rtx->used = 1;
3780    }
3781
3782  if (INSN_P (in_rtx))
3783    {
3784      /* Inside a SEQUENCE, we find insns.
3785	 Renumber just the patterns of these insns,
3786	 just as we do for the top-level insns.  */
3787      leaf_renumber_regs_insn (PATTERN (in_rtx));
3788      return;
3789    }
3790
3791  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3792
3793  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3794    switch (*format_ptr++)
3795      {
3796      case 'e':
3797	leaf_renumber_regs_insn (XEXP (in_rtx, i));
3798	break;
3799
3800      case 'E':
3801	if (NULL != XVEC (in_rtx, i))
3802	  {
3803	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
3804	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3805	  }
3806	break;
3807
3808      case 'S':
3809      case 's':
3810      case '0':
3811      case 'i':
3812      case 'w':
3813      case 'n':
3814      case 'u':
3815	break;
3816
3817      default:
3818	gcc_unreachable ();
3819      }
3820}
3821#endif
3822
3823
3824	/* When -gused is used, emit debug info only for used symbols.  But in
3825	   addition to the standard intercepted debug_hooks there are some direct
3826	   calls into the dbx back end, i.e., dbxout_symbol, dbxout_parms, and
3827	   dbxout_reg_params.  Those routines may also be called from a higher-level
3828	   intercepted routine.  To avoid recording data twice for such nested calls,
3829	   we maintain an intercept nesting counter (debug_nesting) and only save the
3830	   intercepted arguments when the nesting level is 1.  */
3831int debug_nesting = 0;
3832
3833static tree *symbol_queue;
3834int symbol_queue_index = 0;
3835static int symbol_queue_size = 0;
3836
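/* The intended call pattern, roughly sketched (the actual wrapper macros
   live in the dbx back end and may be named differently):

     debug_nesting++;
     ... emit a decl symbol, queueing needed types via debug_queue_symbol ...
     debug_nesting--;
     if (debug_nesting == 0)
       debug_flush_symbol_queue ();  */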
3837/* Generate the symbols for any queued up type symbols we encountered
3838   while generating the type info for some originally used symbol.
3839   This might generate additional entries in the queue.  Only when
3840   the nesting depth goes to 0 is this routine called.  */
3841
3842void
3843debug_flush_symbol_queue (void)
3844{
3845  int i;
3846
3847  /* Make sure that additionally queued items are not flushed
3848     prematurely.  */
3849
3850  ++debug_nesting;
3851
3852  for (i = 0; i < symbol_queue_index; ++i)
3853    {
3854      /* If we pushed queued symbols then such symbols must be
3855         output no matter what anyone else says.  Specifically,
3856         we need to make sure dbxout_symbol() thinks the symbol was
3857         used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3858         which may be set for outside reasons.  */
3859      int saved_tree_used = TREE_USED (symbol_queue[i]);
3860      int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3861      TREE_USED (symbol_queue[i]) = 1;
3862      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3863
3864#ifdef DBX_DEBUGGING_INFO
3865      dbxout_symbol (symbol_queue[i], 0);
3866#endif
3867
3868      TREE_USED (symbol_queue[i]) = saved_tree_used;
3869      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3870    }
3871
3872  symbol_queue_index = 0;
3873  --debug_nesting;
3874}
3875
3876/* Queue a type symbol needed as part of the definition of a decl
3877   symbol.  These symbols are generated when debug_flush_symbol_queue()
3878   is called.  */
3879
3880void
3881debug_queue_symbol (tree decl)
3882{
3883  if (symbol_queue_index >= symbol_queue_size)
3884    {
3885      symbol_queue_size += 10;
3886      symbol_queue = xrealloc (symbol_queue,
3887			       symbol_queue_size * sizeof (tree));
3888    }
3889
3890  symbol_queue[symbol_queue_index++] = decl;
3891}
3892
3893/* Free symbol queue.  */
3894void
3895debug_free_queue (void)
3896{
3897  if (symbol_queue)
3898    {
3899      free (symbol_queue);
3900      symbol_queue = NULL;
3901      symbol_queue_size = 0;
3902    }
3903}
3904
3905/* Turn the RTL into assembly.  */
3906static unsigned int
3907rest_of_handle_final (void)
3908{
3909  rtx x;
3910  const char *fnname;
3911
3912  /* Get the function's name, as described by its RTL.  This may be
3913     different from the DECL_NAME name used in the source file.  */
3914
3915  x = DECL_RTL (current_function_decl);
3916  gcc_assert (MEM_P (x));
3917  x = XEXP (x, 0);
3918  gcc_assert (GET_CODE (x) == SYMBOL_REF);
3919  fnname = XSTR (x, 0);
3920
3921  assemble_start_function (current_function_decl, fnname);
3922  final_start_function (get_insns (), asm_out_file, optimize);
3923  final (get_insns (), asm_out_file, optimize);
3924  final_end_function ();
3925
3926#ifdef TARGET_UNWIND_INFO
3927  /* ??? The IA-64 ".handlerdata" directive must be issued before
3928     the ".endp" directive that closes the procedure descriptor.  */
3929  output_function_exception_table ();
3930#endif
3931
3932  assemble_end_function (current_function_decl, fnname);
3933
3934#ifndef TARGET_UNWIND_INFO
3935  /* Otherwise, it feels unclean to switch sections in the middle.  */
3936  output_function_exception_table ();
3937#endif
3938
3939  user_defined_section_attribute = false;
3940
3941  if (! quiet_flag)
3942    fflush (asm_out_file);
3943
3944  /* Release all memory allocated by flow.  */
3945  free_basic_block_vars ();
3946
3947  /* Write DBX symbols if requested.  */
3948
3949  /* Note that for those inline functions where we don't initially
3950     know for certain that we will be generating an out-of-line copy,
3951     the first invocation of this routine (rest_of_compilation) will
3952     skip over this code by doing a `goto exit_rest_of_compilation;'.
3953     Later on, wrapup_global_declarations will (indirectly) call
3954     rest_of_compilation again for those inline functions that need
3955     to have out-of-line copies generated.  During that call, we
3956     *will* be routed past here.  */
3957
3958  timevar_push (TV_SYMOUT);
3959  (*debug_hooks->function_decl) (current_function_decl);
3960  timevar_pop (TV_SYMOUT);
3961  return 0;
3962}
3963
3964struct tree_opt_pass pass_final =
3965{
3966  NULL,                                 /* name */
3967  NULL,                                 /* gate */
3968  rest_of_handle_final,                 /* execute */
3969  NULL,                                 /* sub */
3970  NULL,                                 /* next */
3971  0,                                    /* static_pass_number */
3972  TV_FINAL,                             /* tv_id */
3973  0,                                    /* properties_required */
3974  0,                                    /* properties_provided */
3975  0,                                    /* properties_destroyed */
3976  0,                                    /* todo_flags_start */
3977  TODO_ggc_collect,                     /* todo_flags_finish */
3978  0                                     /* letter */
3979};
3980
3981
3982static unsigned int
3983rest_of_handle_shorten_branches (void)
3984{
3985  /* Shorten branches.  */
3986  shorten_branches (get_insns ());
3987  return 0;
3988}
3989
3990struct tree_opt_pass pass_shorten_branches =
3991{
3992  "shorten",                            /* name */
3993  NULL,                                 /* gate */
3994  rest_of_handle_shorten_branches,      /* execute */
3995  NULL,                                 /* sub */
3996  NULL,                                 /* next */
3997  0,                                    /* static_pass_number */
3998  TV_FINAL,                             /* tv_id */
3999  0,                                    /* properties_required */
4000  0,                                    /* properties_provided */
4001  0,                                    /* properties_destroyed */
4002  0,                                    /* todo_flags_start */
4003  TODO_dump_func,                       /* todo_flags_finish */
4004  0                                     /* letter */
4005};
4006
4007
4008static unsigned int
4009rest_of_clean_state (void)
4010{
4011  rtx insn, next;
4012
4013  /* It is very important to decompose the RTL instruction chain here:
4014     debug information keeps pointing into CODE_LABEL insns inside the function
4015     body.  If these remain pointing to the other insns, we end up preserving
4016	     the whole RTL chain and the attached detailed debug info in memory.  */
4017  for (insn = get_insns (); insn; insn = next)
4018    {
4019      next = NEXT_INSN (insn);
4020      NEXT_INSN (insn) = NULL;
4021      PREV_INSN (insn) = NULL;
4022    }
4023
4024  /* In case the function was not output,
4025     don't leave any temporary anonymous types
4026     queued up for sdb output.  */
4027#ifdef SDB_DEBUGGING_INFO
4028  if (write_symbols == SDB_DEBUG)
4029    sdbout_types (NULL_TREE);
4030#endif
4031
4032  reload_completed = 0;
4033  epilogue_completed = 0;
4034  flow2_completed = 0;
4035  no_new_pseudos = 0;
4036#ifdef STACK_REGS
4037  regstack_completed = 0;
4038#endif
4039
4040  /* Clear out the insn_length contents now that they are no
4041     longer valid.  */
4042  init_insn_lengths ();
4043
4044  /* Show no temporary slots allocated.  */
4045  init_temp_slots ();
4046
4047  free_basic_block_vars ();
4048  free_bb_for_insn ();
4049
4050
4051  if (targetm.binds_local_p (current_function_decl))
4052    {
4053      int pref = cfun->preferred_stack_boundary;
4054      if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
4055        pref = cfun->stack_alignment_needed;
4056      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4057        = pref;
4058    }
4059
4060  /* Make sure volatile mem refs aren't considered valid operands for
4061     arithmetic insns.  We must call this here if this is a nested inline
4062     function, since the above code leaves us in the init_recog state,
4063     and the function context push/pop code does not save/restore volatile_ok.
4064
4065     ??? Maybe it isn't necessary for expand_start_function to call this
4066     anymore if we do it here?  */
4067
4068  init_recog_no_volatile ();
4069
4070  /* We're done with this function.  Free up memory if we can.  */
4071  free_after_parsing (cfun);
4072  free_after_compilation (cfun);
4073  return 0;
4074}
4075
4076struct tree_opt_pass pass_clean_state =
4077{
4078  NULL,                                 /* name */
4079  NULL,                                 /* gate */
4080  rest_of_clean_state,                  /* execute */
4081  NULL,                                 /* sub */
4082  NULL,                                 /* next */
4083  0,                                    /* static_pass_number */
4084  TV_FINAL,                             /* tv_id */
4085  0,                                    /* properties_required */
4086  0,                                    /* properties_provided */
4087  PROP_rtl,                             /* properties_destroyed */
4088  0,                                    /* todo_flags_start */
4089  0,                                    /* todo_flags_finish */
4090  0                                     /* letter */
4091};
4092