1/* Convert RTL to assembler code and output it, for GNU compiler.
2   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4   Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA.  */
22
23/* This is the final pass of the compiler.
24   It looks at the rtl code for a function and outputs assembler code.
25
26   Call `final_start_function' to output the assembler code for function entry,
27   `final' to output assembler code for some RTL code,
28   `final_end_function' to output assembler code for function exit.
29   If a function is compiled in several pieces, each piece is
30   output separately with `final'.
31
32   Some optimizations are also done at this level.
33   Move instructions that were made unnecessary by good register allocation
34   are detected and omitted from the output.  (Though most of these
35   are removed by the last jump pass.)
36
37   Instructions to set the condition codes are omitted when it can be
38   seen that the condition codes already had the desired values.
39
40   In some cases it is sufficient if the inherited condition codes
41   have related values, but this may require the following insn
42   (the one that tests the condition codes) to be modified.
43
   The code for the function prologue and epilogue is generated
   directly in assembler by the target functions function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */
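
/* A minimal sketch of the call sequence described above (assuming the
   usual asm_out_file stream, a function whose first insn is FIRST, and
   the caller's OPTIMIZE setting):

     final_start_function (first, asm_out_file, optimize);
     final (first, asm_out_file, optimize);
     final_end_function ();

   A function compiled in several pieces would get one call to final for
   each piece, between the start and end calls.  */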
47
48#include "config.h"
49#include "system.h"
50#include "coretypes.h"
51#include "tm.h"
52
53#include "tree.h"
54#include "rtl.h"
55#include "tm_p.h"
56#include "regs.h"
57#include "insn-config.h"
58#include "insn-attr.h"
59#include "recog.h"
60#include "conditions.h"
61#include "flags.h"
62#include "real.h"
63#include "hard-reg-set.h"
64#include "output.h"
65#include "except.h"
66#include "function.h"
67#include "toplev.h"
68#include "reload.h"
69#include "intl.h"
70#include "basic-block.h"
71#include "target.h"
72#include "debug.h"
73#include "expr.h"
74#include "cfglayout.h"
75#include "tree-pass.h"
76#include "timevar.h"
77#include "cgraph.h"
78#include "coverage.h"
79
80#ifdef XCOFF_DEBUGGING_INFO
81#include "xcoffout.h"		/* Needed for external data
82				   declarations for e.g. AIX 4.x.  */
83#endif
84
85#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
86#include "dwarf2out.h"
87#endif
88
89#ifdef DBX_DEBUGGING_INFO
90#include "dbxout.h"
91#endif
92
93#ifdef SDB_DEBUGGING_INFO
94#include "sdbout.h"
95#endif
96
97/* If we aren't using cc0, CC_STATUS_INIT shouldn't exist.  So define a
98   null default for it to save conditionalization later.  */
99#ifndef CC_STATUS_INIT
100#define CC_STATUS_INIT
101#endif
102
103/* How to start an assembler comment.  */
104#ifndef ASM_COMMENT_START
105#define ASM_COMMENT_START ";#"
106#endif
107
108/* Is the given character a logical line separator for the assembler?  */
109#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
110#define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
111#endif
112
113#ifndef JUMP_TABLES_IN_TEXT_SECTION
114#define JUMP_TABLES_IN_TEXT_SECTION 0
115#endif
116
117/* Bitflags used by final_scan_insn.  */
118#define SEEN_BB		1
119#define SEEN_NOTE	2
120#define SEEN_EMITTED	4
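/* These are combined in the *SEEN argument of final_scan_insn below:
   SEEN_NOTE and SEEN_BB record that a prologue-end/function-begin note
   or a basic block note has already been scanned, and SEEN_EMITTED that
   the source line forced after the prologue has been arranged, so that
   it is only emitted once.  */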
121
122/* Last insn processed by final_scan_insn.  */
123static rtx debug_insn;
124rtx current_output_insn;
125
126/* Line number of last NOTE.  */
127static int last_linenum;
128
129/* Highest line number in current block.  */
130static int high_block_linenum;
131
132/* Likewise for function.  */
133static int high_function_linenum;
134
135/* Filename of last NOTE.  */
136static const char *last_filename;
137
138/* Whether to force emission of a line note before the next insn.  */
139static bool force_source_line = false;
140
141extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */
142
143/* Nonzero while outputting an `asm' with operands.
144   This means that inconsistencies are the user's fault, so don't die.
145   The precise value is the insn being output, to pass to error_for_asm.  */
146rtx this_is_asm_operands;
147
148/* Number of operands of this insn, for an `asm' with operands.  */
149static unsigned int insn_noperands;
150
151/* Compare optimization flag.  */
152
153static rtx last_ignored_compare = 0;
154
155/* Assign a unique number to each insn that is output.
156   This can be used to generate unique local labels.  */
157
158static int insn_counter = 0;
159
160#ifdef HAVE_cc0
161/* This variable contains machine-dependent flags (defined in tm.h)
162   set and examined by output routines
163   that describe how to interpret the condition codes properly.  */
164
165CC_STATUS cc_status;
166
167/* During output of an insn, this contains a copy of cc_status
168   from before the insn.  */
169
170CC_STATUS cc_prev_status;
171#endif
172
173/* Indexed by hardware reg number, is 1 if that register is ever
174   used in the current function.
175
176   In life_analysis, or in stupid_life_analysis, this is set
177   up to record the hard regs used explicitly.  Reload adds
178   in the hard regs used for holding pseudo regs.  Final uses
179   it to generate the code in the function prologue and epilogue
180   to save and restore registers as needed.  */
181
182char regs_ever_live[FIRST_PSEUDO_REGISTER];
183
184/* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
185   Unlike regs_ever_live, elements of this array corresponding to
186   eliminable regs like the frame pointer are set if an asm sets them.  */
187
188char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];
189
190/* Nonzero means current function must be given a frame pointer.
191   Initialized in function.c to 0.  Set only in reload1.c as per
192   the needs of the function.  */
193
194int frame_pointer_needed;
195
196/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */
197
198static int block_depth;
199
200/* Nonzero if have enabled APP processing of our assembler output.  */
201
202static int app_on;
203
204/* If we are outputting an insn sequence, this contains the sequence rtx.
205   Zero otherwise.  */
206
207rtx final_sequence;
208
209#ifdef ASSEMBLER_DIALECT
210
211/* Number of the assembler dialect to use, starting at 0.  */
212static int dialect_number;
213#endif
214
215#ifdef HAVE_conditional_execution
216/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
217rtx current_insn_predicate;
218#endif
219
220#ifdef HAVE_ATTR_length
221static int asm_insn_count (rtx);
222#endif
223static void profile_function (FILE *);
224static void profile_after_prologue (FILE *);
225static bool notice_source_line (rtx);
226static rtx walk_alter_subreg (rtx *);
227static void output_asm_name (void);
228static void output_alternate_entry_point (FILE *, rtx);
229static tree get_mem_expr_from_op (rtx, int *);
230static void output_asm_operand_names (rtx *, int *, int);
231static void output_operand (rtx, int);
232#ifdef LEAF_REGISTERS
233static void leaf_renumber_regs (rtx);
234#endif
235#ifdef HAVE_cc0
236static int alter_cond (rtx);
237#endif
238#ifndef ADDR_VEC_ALIGN
239static int final_addr_vec_align (rtx);
240#endif
241#ifdef HAVE_ATTR_length
242static int align_fuzz (rtx, rtx, int, unsigned);
243#endif
244
245/* Initialize data in final at the beginning of a compilation.  */
246
247void
248init_final (const char *filename ATTRIBUTE_UNUSED)
249{
250  app_on = 0;
251  final_sequence = 0;
252
253#ifdef ASSEMBLER_DIALECT
254  dialect_number = ASSEMBLER_DIALECT;
255#endif
256}
257
258/* Default target function prologue and epilogue assembler output.
259
260   If not overridden for epilogue code, then the function body itself
261   contains return instructions wherever needed.  */
262void
263default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
264			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
265{
266}
267
268/* Default target hook that outputs nothing to a stream.  */
269void
270no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
271{
272}
273
274/* Enable APP processing of subsequent output.
275   Used before the output from an `asm' statement.  */
276
277void
278app_enable (void)
279{
280  if (! app_on)
281    {
282      fputs (ASM_APP_ON, asm_out_file);
283      app_on = 1;
284    }
285}
286
287/* Disable APP processing of subsequent output.
288   Called from varasm.c before most kinds of output.  */
289
290void
291app_disable (void)
292{
293  if (app_on)
294    {
295      fputs (ASM_APP_OFF, asm_out_file);
296      app_on = 0;
297    }
298}
299
300/* Return the number of slots filled in the current
301   delayed branch sequence (we don't count the insn needing the
302   delay slot).   Zero if not in a delayed branch sequence.  */
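/* For example, when final_sequence holds a branch together with two
   filled delay insns, XVECLEN (final_sequence, 0) is 3 and this
   returns 2.  */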
303
304#ifdef DELAY_SLOTS
305int
306dbr_sequence_length (void)
307{
308  if (final_sequence != 0)
309    return XVECLEN (final_sequence, 0) - 1;
310  else
311    return 0;
312}
313#endif
314
315/* The next two pages contain routines used to compute the length of an insn
316   and to shorten branches.  */
317
318/* Arrays for insn lengths, and addresses.  The latter is referenced by
319   `insn_current_length'.  */
320
321static int *insn_lengths;
322
323varray_type insn_addresses_;
324
325/* Max uid for which the above arrays are valid.  */
326static int insn_lengths_max_uid;
327
328/* Address of insn being processed.  Used by `insn_current_length'.  */
329int insn_current_address;
330
331/* Address of insn being processed in previous iteration.  */
332int insn_last_address;
333
334/* known invariant alignment of insn being processed.  */
335int insn_current_align;
336
337/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
338   gives the next following alignment insn that increases the known
339   alignment, or NULL_RTX if there is no such insn.
340   For any alignment obtained this way, we can again index uid_align with
341   its uid to obtain the next following align that in turn increases the
342   alignment, till we reach NULL_RTX; the sequence obtained this way
343   for each insn we'll call the alignment chain of this insn in the following
344   comments.  */
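
/* As a hypothetical illustration: if insn I is followed first by a label
   L1 aligned to 2**2 and later by a label L2 aligned to 2**4 (with no
   stronger alignment in between or after), then uid_align[INSN_UID (I)]
   is L1, uid_align[INSN_UID (L1)] is L2, and uid_align[INSN_UID (L2)] is
   NULL_RTX, so the alignment chain of I is L1, L2.  */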
345
346/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
347/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
348static rtx *uid_align;
349static int *uid_shuid;
350/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
351
352/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
353
354/* Indicate that branch shortening hasn't yet been done.  */
355
356void
357init_insn_lengths (void)
358{
359  if (uid_shuid)
360    {
361      free (uid_shuid);
362      uid_shuid = 0;
363    }
364  if (insn_lengths)
365    {
366      free (insn_lengths);
367      insn_lengths = 0;
368      insn_lengths_max_uid = 0;
369    }
370#ifdef HAVE_ATTR_length
371  INSN_ADDRESSES_FREE ();
372#endif
373  if (uid_align)
374    {
375      free (uid_align);
376      uid_align = 0;
377    }
378}
379
380/* Obtain the current length of an insn.  If branch shortening has been done,
381   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
382   length.  */
383static inline int
384get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
385		   int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
386{
387#ifdef HAVE_ATTR_length
388  rtx body;
389  int i;
390  int length = 0;
391
392  if (insn_lengths_max_uid > INSN_UID (insn))
393    return insn_lengths[INSN_UID (insn)];
394  else
395    switch (GET_CODE (insn))
396      {
397      case NOTE:
398      case BARRIER:
399      case CODE_LABEL:
400	return 0;
401
402      case CALL_INSN:
403	length = fallback_fn (insn);
404	break;
405
406      case JUMP_INSN:
407	body = PATTERN (insn);
408	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
409	  {
410	    /* Alignment is machine-dependent and should be handled by
411	       ADDR_VEC_ALIGN.  */
412	  }
413	else
414	  length = fallback_fn (insn);
415	break;
416
417      case INSN:
418	body = PATTERN (insn);
419	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
420	  return 0;
421
422	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
423	  length = asm_insn_count (body) * fallback_fn (insn);
424	else if (GET_CODE (body) == SEQUENCE)
425	  for (i = 0; i < XVECLEN (body, 0); i++)
426	    length += get_attr_length (XVECEXP (body, 0, i));
427	else
428	  length = fallback_fn (insn);
429	break;
430
431      default:
432	break;
433      }
434
435#ifdef ADJUST_INSN_LENGTH
436  ADJUST_INSN_LENGTH (insn, length);
437#endif
438  return length;
439#else /* not HAVE_ATTR_length */
440  return 0;
441#define insn_default_length 0
442#define insn_min_length 0
443#endif /* not HAVE_ATTR_length */
444}
445
446/* Obtain the current length of an insn.  If branch shortening has been done,
447   get its actual length.  Otherwise, get its maximum length.  */
448int
449get_attr_length (rtx insn)
450{
451  return get_attr_length_1 (insn, insn_default_length);
452}
453
454/* Obtain the current length of an insn.  If branch shortening has been done,
455   get its actual length.  Otherwise, get its minimum length.  */
456int
457get_attr_min_length (rtx insn)
458{
459  return get_attr_length_1 (insn, insn_min_length);
460}
461
462/* Code to handle alignment inside shorten_branches.  */
463
/* Here is an explanation of how the algorithm in align_fuzz can give
   proper results:
466
467   Call a sequence of instructions beginning with alignment point X
468   and continuing until the next alignment point `block X'.  When `X'
469   is used in an expression, it means the alignment value of the
470   alignment point.
471
472   Call the distance between the start of the first insn of block X, and
473   the end of the last insn of block X `IX', for the `inner size of X'.
474   This is clearly the sum of the instruction lengths.
475
476   Likewise with the next alignment-delimited block following X, which we
477   shall call block Y.
478
479   Call the distance between the start of the first insn of block X, and
480   the start of the first insn of block Y `OX', for the `outer size of X'.
481
482   The estimated padding is then OX - IX.
483
484   OX can be safely estimated as
485
486           if (X >= Y)
487                   OX = round_up(IX, Y)
488           else
489                   OX = round_up(IX, X) + Y - X
490
491   Clearly est(IX) >= real(IX), because that only depends on the
492   instruction lengths, and those being overestimated is a given.
493
494   Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
495   we needn't worry about that when thinking about OX.
496
497   When X >= Y, the alignment provided by Y adds no uncertainty factor
498   for branch ranges starting before X, so we can just round what we have.
499   But when X < Y, we don't know anything about the, so to speak,
500   `middle bits', so we have to assume the worst when aligning up from an
501   address mod X to one mod Y, which is Y - X.  */
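
/* As a worked example of the estimate above: with X = 4, Y = 8 and an
   inner size IX = 10, the X < Y case gives OX = round_up (10, 4) + 8 - 4
   = 16, i.e. an estimated padding of 6; with X = 8 and Y = 4 instead, the
   X >= Y case gives OX = round_up (10, 4) = 12, a padding of 2.  */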
502
503#ifndef LABEL_ALIGN
504#define LABEL_ALIGN(LABEL) align_labels_log
505#endif
506
507#ifndef LABEL_ALIGN_MAX_SKIP
508#define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
509#endif
510
511#ifndef LOOP_ALIGN
512#define LOOP_ALIGN(LABEL) align_loops_log
513#endif
514
515#ifndef LOOP_ALIGN_MAX_SKIP
516#define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
517#endif
518
519#ifndef LABEL_ALIGN_AFTER_BARRIER
520#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
521#endif
522
523#ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
524#define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
525#endif
526
527#ifndef JUMP_ALIGN
528#define JUMP_ALIGN(LABEL) align_jumps_log
529#endif
530
531#ifndef JUMP_ALIGN_MAX_SKIP
532#define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
533#endif
534
535#ifndef ADDR_VEC_ALIGN
536static int
537final_addr_vec_align (rtx addr_vec)
538{
539  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
540
541  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
542    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);
}
546
547#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
548#endif
549
550#ifndef INSN_LENGTH_ALIGNMENT
551#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
552#endif
553
554#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
555
556/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
557/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
558/* For the benefit of port specific code do this also as a function.  */
559
560int
561label_to_alignment (rtx label)
562{
563/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
564  return LABEL_ALIGN_LOG (label);
565/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
566}
567
568#ifdef HAVE_ATTR_length
569/* The differences in addresses
570   between a branch and its target might grow or shrink depending on
571   the alignment the start insn of the range (the branch for a forward
572   branch or the label for a backward branch) starts out on; if these
573   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
577   The function align_fuzz calculates the amount we have to add to the
578   naively computed difference, by traversing the part of the alignment
579   chain of the start insn of the range that is in front of the end insn
580   of the range, and considering for each alignment the maximum amount
581   that it might contribute to a size increase.
582
583   For casesi tables, we also want to know worst case minimum amounts of
584   address difference, in case a machine description wants to introduce
585   some common offset that is added to all offsets in a table.
586   For this purpose, align_fuzz with a growth argument of 0 computes the
587   appropriate adjustment.  */
588
589/* Compute the maximum delta by which the difference of the addresses of
590   START and END might grow / shrink due to a different address for start
591   which changes the size of alignment insns between START and END.
592   KNOWN_ALIGN_LOG is the alignment known for START.
593   GROWTH should be ~0 if the objective is to compute potential code size
594   increase, and 0 if the objective is to compute potential shrink.
595   The return value is undefined for any other value of GROWTH.  */
596
597static int
598align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
599{
600  int uid = INSN_UID (start);
601  rtx align_label;
602  int known_align = 1 << known_align_log;
603  int end_shuid = INSN_SHUID (end);
604  int fuzz = 0;
605
606  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
607    {
608      int align_addr, new_align;
609
610      uid = INSN_UID (align_label);
611      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
612      if (uid_shuid[uid] > end_shuid)
613	break;
614/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
615      known_align_log = LABEL_ALIGN_LOG (align_label);
616/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
617      new_align = 1 << known_align_log;
618      if (new_align < known_align)
619	continue;
620      fuzz += (-align_addr ^ growth) & (new_align - known_align);
621      known_align = new_align;
622    }
623  return fuzz;
624}
625
626/* Compute a worst-case reference address of a branch so that it
627   can be safely used in the presence of aligned labels.  Since the
628   size of the branch itself is unknown, the size of the branch is
629   not included in the range.  I.e. for a forward branch, the reference
630   address is the end address of the branch as known from the previous
631   branch shortening pass, minus a value to account for possible size
632   increase due to alignment.  For a backward branch, it is the start
633   address of the branch as known from the current pass, plus a value
634   to account for possible size increase due to alignment.
635   NB.: Therefore, the maximum offset allowed for backward branches needs
636   to exclude the branch size.  */
637
638int
639insn_current_reference_address (rtx branch)
640{
641  rtx dest, seq;
642  int seq_uid;
643
644  if (! INSN_ADDRESSES_SET_P ())
645    return 0;
646
647  seq = NEXT_INSN (PREV_INSN (branch));
648  seq_uid = INSN_UID (seq);
649  if (!JUMP_P (branch))
650    /* This can happen for example on the PA; the objective is to know the
651       offset to address something in front of the start of the function.
652       Thus, we can treat it like a backward branch.
653       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
654       any alignment we'd encounter, so we skip the call to align_fuzz.  */
655    return insn_current_address;
656  dest = JUMP_LABEL (branch);
657
658  /* BRANCH has no proper alignment chain set, so use SEQ.
659     BRANCH also has no INSN_SHUID.  */
660  if (INSN_SHUID (seq) < INSN_SHUID (dest))
661    {
662      /* Forward branch.  */
663      return (insn_last_address + insn_lengths[seq_uid]
664	      - align_fuzz (seq, dest, length_unit_log, ~0));
665    }
666  else
667    {
668      /* Backward branch.  */
669      return (insn_current_address
670	      + align_fuzz (dest, seq, length_unit_log, ~0));
671    }
672}
673#endif /* HAVE_ATTR_length */
674
675/* Compute branch alignments based on frequency information in the
676   CFG.  */
677
678static unsigned int
679compute_alignments (void)
680{
681/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
682/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
683  basic_block bb;
684
685/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
686/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
687
688  /* If not optimizing or optimizing for size, don't assign any alignments.  */
689  if (! optimize || optimize_size)
690    return 0;
691
692  FOR_EACH_BB (bb)
693    {
694      rtx label = BB_HEAD (bb);
695      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
696      edge e;
697      edge_iterator ei;
698/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
699      int log, max_skip, max_log;
700
701/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
702      if (!LABEL_P (label)
703	  || probably_never_executed_bb_p (bb))
704	continue;
705/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
706      /* If user has specified an alignment, honour it.  */
707      if (LABEL_ALIGN_LOG (label) > 0)
708	continue;
709
710/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
711      max_log = LABEL_ALIGN (label);
712      max_skip = LABEL_ALIGN_MAX_SKIP;
713
714      FOR_EACH_EDGE (e, ei, bb->preds)
715	{
716	  if (e->flags & EDGE_FALLTHRU)
717	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
718	  else
719	    branch_frequency += EDGE_FREQUENCY (e);
720	}
721
      /* There are two reasons to align a block that has no incoming
	 fallthru edge:
	 1) to avoid fetch stalls when the branch destination is near a
	    cache boundary
	 2) to improve cache efficiency in case the previous block is not
	    executed (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and the predecessor is likely to not be executed
	 when the function is called.  */
731
732      if (!has_fallthru
733	  && (branch_frequency > BB_FREQ_MAX / 10
734	      || (bb->frequency > bb->prev_bb->frequency * 10
735		  && (bb->prev_bb->frequency
736		      <= ENTRY_BLOCK_PTR->frequency / 2))))
737	{
738	  log = JUMP_ALIGN (label);
739	  if (max_log < log)
740	    {
741	      max_log = log;
742	      max_skip = JUMP_ALIGN_MAX_SKIP;
743	    }
744	}
      /* If the block is frequent and reached mostly by non-fallthru edges,
	 align it.  It is most likely the first block of a loop.  */
747      if (has_fallthru
748	  && maybe_hot_bb_p (bb)
749	  && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
750	  && branch_frequency > fallthru_frequency * 2)
751	{
752	  log = LOOP_ALIGN (label);
753	  if (max_log < log)
754	    {
755	      max_log = log;
756	      max_skip = LOOP_ALIGN_MAX_SKIP;
757	    }
758	}
759/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
760      SET_LABEL_ALIGN (label, max_log, max_skip);
761/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
762    }
763  return 0;
764}
765
766struct tree_opt_pass pass_compute_alignments =
767{
768  NULL,                                 /* name */
769  NULL,                                 /* gate */
770  compute_alignments,                   /* execute */
771  NULL,                                 /* sub */
772  NULL,                                 /* next */
773  0,                                    /* static_pass_number */
774  0,                                    /* tv_id */
775  0,                                    /* properties_required */
776  0,                                    /* properties_provided */
777  0,                                    /* properties_destroyed */
778  0,                                    /* todo_flags_start */
779  0,                                    /* todo_flags_finish */
780  0                                     /* letter */
781};
782
783
784/* Make a pass over all insns and compute their actual lengths by shortening
785   any branches of variable length if possible.  */
786
787/* shorten_branches might be called multiple times:  for example, the SH
788   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
789   In order to do this, it needs proper length information, which it obtains
790   by calling shorten_branches.  This cannot be collapsed with
791   shorten_branches itself into a single pass unless we also want to integrate
792   reorg.c, since the branch splitting exposes new instructions with delay
793   slots.  */
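
/* A minimal sketch of such a repeated invocation (as in the SH scenario
   above): after splitting branches in a machine-dependent reorg pass, a
   port can simply call

     shorten_branches (get_insns ());

   again so that get_attr_length and INSN_ADDRESSES reflect the new insn
   stream before they are consulted.  */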
794
795void
796shorten_branches (rtx first ATTRIBUTE_UNUSED)
797{
798  rtx insn;
799  int max_uid;
800  int i;
801  int max_log;
802  int max_skip;
803#ifdef HAVE_ATTR_length
804#define MAX_CODE_ALIGN 16
805  rtx seq;
806  int something_changed = 1;
807  char *varying_length;
808  rtx body;
809  int uid;
810  rtx align_tab[MAX_CODE_ALIGN];
811
812#endif
813
814/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
815  /* Compute maximum UID and allocate uid_shuid.  */
816/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
817  max_uid = get_max_uid ();
818
819  /* Free uid_shuid before reallocating it.  */
820  free (uid_shuid);
821
822  uid_shuid = XNEWVEC (int, max_uid);
823
824  /* APPLE LOCAL for-fsf-4_4 3274130 5295549 */ \
  /* Set up uid_shuid to be strictly
     monotonically rising with insn order.  */
827  /* We use max_log here to keep track of the maximum alignment we want to
828     impose on the next CODE_LABEL (or the current one if we are processing
829     the CODE_LABEL itself).  */
830
831  max_log = 0;
832  max_skip = 0;
833
834  for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
835    {
836      int log;
837
838      INSN_SHUID (insn) = i++;
839      if (INSN_P (insn))
840	continue;
841
842      if (LABEL_P (insn))
843	{
844	  rtx next;
845
846	  /* Merge in alignments computed by compute_alignments.  */
847/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
848	  log = LABEL_ALIGN_LOG (insn);
849/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
850	  if (max_log < log)
851	    {
852	      max_log = log;
853/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
854	      max_skip = LABEL_MAX_SKIP (insn);
855/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
856	    }
857
858	  log = LABEL_ALIGN (insn);
859	  if (max_log < log)
860	    {
861	      max_log = log;
862	      max_skip = LABEL_ALIGN_MAX_SKIP;
863	    }
864	  next = next_nonnote_insn (insn);
865	  /* ADDR_VECs only take room if read-only data goes into the text
866	     section.  */
867	  if (JUMP_TABLES_IN_TEXT_SECTION
868	      || readonly_data_section == text_section)
869	    if (next && JUMP_P (next))
870	      {
871		rtx nextbody = PATTERN (next);
872		if (GET_CODE (nextbody) == ADDR_VEC
873		    || GET_CODE (nextbody) == ADDR_DIFF_VEC)
874		  {
875		    log = ADDR_VEC_ALIGN (next);
876		    if (max_log < log)
877		      {
878			max_log = log;
879			max_skip = LABEL_ALIGN_MAX_SKIP;
880		      }
881		  }
882	      }
883/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
884	  SET_LABEL_ALIGN (insn, max_log, max_skip);
885/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
886	  max_log = 0;
887	  max_skip = 0;
888	}
889      else if (BARRIER_P (insn))
890	{
891	  rtx label;
892
893	  for (label = insn; label && ! INSN_P (label);
894	       label = NEXT_INSN (label))
895	    if (LABEL_P (label))
896	      {
897		log = LABEL_ALIGN_AFTER_BARRIER (insn);
898		if (max_log < log)
899		  {
900		    max_log = log;
901		    max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
902		  }
903		break;
904	      }
905	}
906    }
907#ifdef HAVE_ATTR_length
908
909  /* Allocate the rest of the arrays.  */
910  insn_lengths = XNEWVEC (int, max_uid);
911  insn_lengths_max_uid = max_uid;
912  /* Syntax errors can lead to labels being outside of the main insn stream.
913     Initialize insn_addresses, so that we get reproducible results.  */
914  INSN_ADDRESSES_ALLOC (max_uid);
915
916  varying_length = XCNEWVEC (char, max_uid);
917
918  /* Initialize uid_align.  We scan instructions
919     from end to start, and keep in align_tab[n] the last seen insn
920     that does an alignment of at least n+1, i.e. the successor
921     in the alignment chain for an insn that does / has a known
922     alignment of n.  */
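  /* For instance, when the backward scan passes a label aligned to 2**3,
     that label is recorded in align_tab[0] through align_tab[2], so every
     earlier insn whose known alignment is below 2**3 names it as the next
     link in its alignment chain, until a still earlier alignment label
     overwrites those slots.  */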
923  uid_align = XCNEWVEC (rtx, max_uid);
924
925  for (i = MAX_CODE_ALIGN; --i >= 0;)
926    align_tab[i] = NULL_RTX;
927  seq = get_last_insn ();
928  for (; seq; seq = PREV_INSN (seq))
929    {
930      int uid = INSN_UID (seq);
931      int log;
932/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
933      log = (LABEL_P (seq) ? LABEL_ALIGN_LOG (seq) : 0);
934/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
935      uid_align[uid] = align_tab[0];
936      if (log)
937	{
938	  /* Found an alignment label.  */
939	  uid_align[uid] = align_tab[log];
940	  for (i = log - 1; i >= 0; i--)
941	    align_tab[i] = seq;
942	}
943    }
944#ifdef CASE_VECTOR_SHORTEN_MODE
945  if (optimize)
946    {
947      /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
948         label fields.  */
949
950      int min_shuid = INSN_SHUID (get_insns ()) - 1;
951      int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
952      int rel;
953
954      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
955	{
956	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
957	  int len, i, min, max, insn_shuid;
958	  int min_align;
959	  addr_diff_vec_flags flags;
960
961	  if (!JUMP_P (insn)
962	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
963	    continue;
964	  pat = PATTERN (insn);
965	  len = XVECLEN (pat, 1);
966	  gcc_assert (len > 0);
967	  min_align = MAX_CODE_ALIGN;
968	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
969	    {
970	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
971	      int shuid = INSN_SHUID (lab);
972	      if (shuid < min)
973		{
974		  min = shuid;
975		  min_lab = lab;
976		}
977	      if (shuid > max)
978		{
979		  max = shuid;
980		  max_lab = lab;
981		}
982/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
983	      if (min_align > (int) LABEL_ALIGN_LOG (lab))
984		min_align = LABEL_ALIGN_LOG (lab);
985/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
986	    }
987	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
988	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
989	  insn_shuid = INSN_SHUID (insn);
990	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
991	  memset (&flags, 0, sizeof (flags));
992	  flags.min_align = min_align;
993	  flags.base_after_vec = rel > insn_shuid;
994	  flags.min_after_vec  = min > insn_shuid;
995	  flags.max_after_vec  = max > insn_shuid;
996	  flags.min_after_base = min > rel;
997	  flags.max_after_base = max > rel;
998	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
999	}
1000    }
1001#endif /* CASE_VECTOR_SHORTEN_MODE */
1002
1003  /* Compute initial lengths, addresses, and varying flags for each insn.  */
1004  for (insn_current_address = 0, insn = first;
1005       insn != 0;
1006       insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1007    {
1008      uid = INSN_UID (insn);
1009
1010      insn_lengths[uid] = 0;
1011
1012      if (LABEL_P (insn))
1013	{
1014/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
1015	  int log = LABEL_ALIGN_LOG (insn);
1016/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
1017	  if (log)
1018	    {
1019	      int align = 1 << log;
1020	      int new_address = (insn_current_address + align - 1) & -align;
1021	      insn_lengths[uid] = new_address - insn_current_address;
1022	    }
1023	}
1024
1025      INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1026
1027      if (NOTE_P (insn) || BARRIER_P (insn)
1028	  || LABEL_P (insn))
1029	continue;
1030      if (INSN_DELETED_P (insn))
1031	continue;
1032
1033      body = PATTERN (insn);
1034      if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1035	{
1036	  /* This only takes room if read-only data goes into the text
1037	     section.  */
1038	  if (JUMP_TABLES_IN_TEXT_SECTION
1039	      || readonly_data_section == text_section)
1040	    insn_lengths[uid] = (XVECLEN (body,
1041					  GET_CODE (body) == ADDR_DIFF_VEC)
1042				 * GET_MODE_SIZE (GET_MODE (body)));
1043	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
1044	}
1045      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1046	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1047      else if (GET_CODE (body) == SEQUENCE)
1048	{
1049	  int i;
1050	  int const_delay_slots;
1051#ifdef DELAY_SLOTS
1052	  const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1053#else
1054	  const_delay_slots = 0;
1055#endif
1056	  /* Inside a delay slot sequence, we do not do any branch shortening
1057	     if the shortening could change the number of delay slots
1058	     of the branch.  */
1059	  for (i = 0; i < XVECLEN (body, 0); i++)
1060	    {
1061	      rtx inner_insn = XVECEXP (body, 0, i);
1062	      int inner_uid = INSN_UID (inner_insn);
1063	      int inner_length;
1064
1065	      if (GET_CODE (body) == ASM_INPUT
1066		  || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1067		inner_length = (asm_insn_count (PATTERN (inner_insn))
1068				* insn_default_length (inner_insn));
1069	      else
1070		inner_length = insn_default_length (inner_insn);
1071
1072	      insn_lengths[inner_uid] = inner_length;
1073	      if (const_delay_slots)
1074		{
1075		  if ((varying_length[inner_uid]
1076		       = insn_variable_length_p (inner_insn)) != 0)
1077		    varying_length[uid] = 1;
1078		  INSN_ADDRESSES (inner_uid) = (insn_current_address
1079						+ insn_lengths[uid]);
1080		}
1081	      else
1082		varying_length[inner_uid] = 0;
1083	      insn_lengths[uid] += inner_length;
1084	    }
1085	}
1086      else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1087	{
1088	  insn_lengths[uid] = insn_default_length (insn);
1089	  varying_length[uid] = insn_variable_length_p (insn);
1090	}
1091
1092      /* If needed, do any adjustment.  */
1093#ifdef ADJUST_INSN_LENGTH
1094      ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1095      if (insn_lengths[uid] < 0)
1096	fatal_insn ("negative insn length", insn);
1097#endif
1098    }
1099
1100  /* Now loop over all the insns finding varying length insns.  For each,
1101     get the current insn length.  If it has changed, reflect the change.
1102     When nothing changes for a full pass, we are done.  */
1103
1104  while (something_changed)
1105    {
1106      something_changed = 0;
1107      insn_current_align = MAX_CODE_ALIGN - 1;
1108      for (insn_current_address = 0, insn = first;
1109	   insn != 0;
1110	   insn = NEXT_INSN (insn))
1111	{
1112	  int new_length;
1113#ifdef ADJUST_INSN_LENGTH
1114	  int tmp_length;
1115#endif
1116	  int length_align;
1117
1118	  uid = INSN_UID (insn);
1119
1120	  if (LABEL_P (insn))
1121	    {
1122/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
1123	      int log = LABEL_ALIGN_LOG (insn);
1124/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
1125	      if (log > insn_current_align)
1126		{
1127		  int align = 1 << log;
1128		  int new_address= (insn_current_address + align - 1) & -align;
1129		  insn_lengths[uid] = new_address - insn_current_address;
1130		  insn_current_align = log;
1131		  insn_current_address = new_address;
1132		}
1133	      else
1134		insn_lengths[uid] = 0;
1135	      INSN_ADDRESSES (uid) = insn_current_address;
1136	      continue;
1137	    }
1138
1139	  length_align = INSN_LENGTH_ALIGNMENT (insn);
1140	  if (length_align < insn_current_align)
1141	    insn_current_align = length_align;
1142
1143	  insn_last_address = INSN_ADDRESSES (uid);
1144	  INSN_ADDRESSES (uid) = insn_current_address;
1145
1146#ifdef CASE_VECTOR_SHORTEN_MODE
1147	  if (optimize && JUMP_P (insn)
1148	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1149	    {
1150	      rtx body = PATTERN (insn);
1151	      int old_length = insn_lengths[uid];
1152	      rtx rel_lab = XEXP (XEXP (body, 0), 0);
1153	      rtx min_lab = XEXP (XEXP (body, 2), 0);
1154	      rtx max_lab = XEXP (XEXP (body, 3), 0);
1155	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1156	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1157	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1158	      rtx prev;
1159	      int rel_align = 0;
1160	      addr_diff_vec_flags flags;
1161
1162	      /* Avoid automatic aggregate initialization.  */
1163	      flags = ADDR_DIFF_VEC_FLAGS (body);
1164
1165	      /* Try to find a known alignment for rel_lab.  */
1166	      for (prev = rel_lab;
1167		   prev
1168		   && ! insn_lengths[INSN_UID (prev)]
1169		   && ! (varying_length[INSN_UID (prev)] & 1);
1170		   prev = PREV_INSN (prev))
1171		if (varying_length[INSN_UID (prev)] & 2)
1172		  {
1173/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
1174		    rel_align = LABEL_ALIGN_LOG (prev);
1175/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
1176		    break;
1177		  }
1178
1179	      /* See the comment on addr_diff_vec_flags in rtl.h for the
1180		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
1181	      /* Anything after INSN has still addresses from the last
1182		 pass; adjust these so that they reflect our current
1183		 estimate for this pass.  */
1184	      if (flags.base_after_vec)
1185		rel_addr += insn_current_address - insn_last_address;
1186	      if (flags.min_after_vec)
1187		min_addr += insn_current_address - insn_last_address;
1188	      if (flags.max_after_vec)
1189		max_addr += insn_current_address - insn_last_address;
	      /* We want to know the worst case, i.e. lowest possible value
		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
		 its offset is positive, and we have to be wary of code shrink;
		 otherwise, it is negative, and we have to be wary of code
		 size increase.  */
1195	      if (flags.min_after_base)
1196		{
1197		  /* If INSN is between REL_LAB and MIN_LAB, the size
1198		     changes we are about to make can change the alignment
1199		     within the observed offset, therefore we have to break
1200		     it up into two parts that are independent.  */
1201		  if (! flags.base_after_vec && flags.min_after_vec)
1202		    {
1203		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1204		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
1205		    }
1206		  else
1207		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1208		}
1209	      else
1210		{
1211		  if (flags.base_after_vec && ! flags.min_after_vec)
1212		    {
1213		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1214		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1215		    }
1216		  else
1217		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1218		}
	      /* Likewise, determine the highest possible value
		 for the offset of MAX_LAB.  */
1221	      if (flags.max_after_base)
1222		{
1223		  if (! flags.base_after_vec && flags.max_after_vec)
1224		    {
1225		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1226		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
1227		    }
1228		  else
1229		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1230		}
1231	      else
1232		{
1233		  if (flags.base_after_vec && ! flags.max_after_vec)
1234		    {
1235		      max_addr += align_fuzz (max_lab, insn, 0, 0);
1236		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
1237		    }
1238		  else
1239		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1240		}
1241	      PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1242							max_addr - rel_addr,
1243							body));
1244	      if (JUMP_TABLES_IN_TEXT_SECTION
1245		  || readonly_data_section == text_section)
1246		{
1247		  insn_lengths[uid]
1248		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1249		  insn_current_address += insn_lengths[uid];
1250		  if (insn_lengths[uid] != old_length)
1251		    something_changed = 1;
1252		}
1253
1254	      continue;
1255	    }
1256#endif /* CASE_VECTOR_SHORTEN_MODE */
1257
1258	  if (! (varying_length[uid]))
1259	    {
1260	      if (NONJUMP_INSN_P (insn)
1261		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
1262		{
1263		  int i;
1264
1265		  body = PATTERN (insn);
1266		  for (i = 0; i < XVECLEN (body, 0); i++)
1267		    {
1268		      rtx inner_insn = XVECEXP (body, 0, i);
1269		      int inner_uid = INSN_UID (inner_insn);
1270
1271		      INSN_ADDRESSES (inner_uid) = insn_current_address;
1272
1273		      insn_current_address += insn_lengths[inner_uid];
1274		    }
1275		}
1276	      else
1277		insn_current_address += insn_lengths[uid];
1278
1279	      continue;
1280	    }
1281
1282	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1283	    {
1284	      int i;
1285
1286	      body = PATTERN (insn);
1287	      new_length = 0;
1288	      for (i = 0; i < XVECLEN (body, 0); i++)
1289		{
1290		  rtx inner_insn = XVECEXP (body, 0, i);
1291		  int inner_uid = INSN_UID (inner_insn);
1292		  int inner_length;
1293
1294		  INSN_ADDRESSES (inner_uid) = insn_current_address;
1295
1296		  /* insn_current_length returns 0 for insns with a
1297		     non-varying length.  */
1298		  if (! varying_length[inner_uid])
1299		    inner_length = insn_lengths[inner_uid];
1300		  else
1301		    inner_length = insn_current_length (inner_insn);
1302
1303		  if (inner_length != insn_lengths[inner_uid])
1304		    {
1305		      insn_lengths[inner_uid] = inner_length;
1306		      something_changed = 1;
1307		    }
1308		  insn_current_address += insn_lengths[inner_uid];
1309		  new_length += inner_length;
1310		}
1311	    }
1312	  else
1313	    {
1314	      new_length = insn_current_length (insn);
1315	      insn_current_address += new_length;
1316	    }
1317
1318#ifdef ADJUST_INSN_LENGTH
1319	  /* If needed, do any adjustment.  */
1320	  tmp_length = new_length;
1321	  ADJUST_INSN_LENGTH (insn, new_length);
1322	  insn_current_address += (new_length - tmp_length);
1323#endif
1324
1325	  if (new_length != insn_lengths[uid])
1326	    {
1327	      insn_lengths[uid] = new_length;
1328	      something_changed = 1;
1329	    }
1330	}
1331      /* For a non-optimizing compile, do only a single pass.  */
1332      if (!optimize)
1333	break;
1334    }
1335
1336  free (varying_length);
1337
1338#endif /* HAVE_ATTR_length */
1339}
1340
1341#ifdef HAVE_ATTR_length
1342/* Given the body of an INSN known to be generated by an ASM statement, return
1343   the number of machine instructions likely to be generated for this insn.
1344   This is used to compute its length.  */
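/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR of ';', a
   template such as "mov r0,r1; add r1,r2\n sub r2,r3" counts as three
   machine instructions.  */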
1345
1346static int
1347asm_insn_count (rtx body)
1348{
1349  const char *template;
1350  int count = 1;
1351
1352  if (GET_CODE (body) == ASM_INPUT)
1353    template = XSTR (body, 0);
1354  else
1355    template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1356
1357  for (; *template; template++)
1358    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1359      count++;
1360
1361  return count;
1362}
1363#endif
1364
1365/* Output assembler code for the start of a function,
1366   and initialize some of the variables in this file
1367   for the new function.  The label for the function and associated
1368   assembler pseudo-ops have already been output in `assemble_start_function'.
1369
1370   FIRST is the first insn of the rtl for the function being compiled.
1371   FILE is the file to write assembler code to.
1372   OPTIMIZE is nonzero if we should eliminate redundant
1373     test and compare insns.  */
1374
1375void
1376final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1377		      int optimize ATTRIBUTE_UNUSED)
1378{
1379  block_depth = 0;
1380
1381  this_is_asm_operands = 0;
1382
1383  last_filename = locator_file (prologue_locator);
1384  last_linenum = locator_line (prologue_locator);
1385
1386  high_block_linenum = high_function_linenum = last_linenum;
1387
1388  (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1389
1390#if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1391  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1392    dwarf2out_begin_prologue (0, NULL);
1393#endif
1394
1395#ifdef LEAF_REG_REMAP
1396  if (current_function_uses_only_leaf_regs)
1397    leaf_renumber_regs (first);
1398#endif
1399
1400  /* The Sun386i and perhaps other machines don't work right
1401     if the profiling code comes after the prologue.  */
1402#ifdef PROFILE_BEFORE_PROLOGUE
1403  if (current_function_profile)
1404    profile_function (file);
1405#endif /* PROFILE_BEFORE_PROLOGUE */
1406
1407#if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1408  if (dwarf2out_do_frame ())
1409    dwarf2out_frame_debug (NULL_RTX, false);
1410#endif
1411
1412  /* If debugging, assign block numbers to all of the blocks in this
1413     function.  */
1414  if (write_symbols)
1415    {
1416      reemit_insn_block_notes ();
1417      number_blocks (current_function_decl);
1418      /* We never actually put out begin/end notes for the top-level
1419	 block in the function.  But, conceptually, that block is
1420	 always needed.  */
1421      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1422    }
1423
  if (warn_frame_larger_than
      && get_frame_size () > frame_larger_than_size)
    {
      /* Issue a warning.  */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wd bytes is larger than %wd bytes",
	       get_frame_size (), frame_larger_than_size);
    }
1432
1433  /* First output the function prologue: code to set up the stack frame.  */
1434  targetm.asm_out.function_prologue (file, get_frame_size ());
1435
1436  /* If the machine represents the prologue as RTL, the profiling code must
1437     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
1438#ifdef HAVE_prologue
1439  if (! HAVE_prologue)
1440#endif
1441    profile_after_prologue (file);
1442}
1443
1444static void
1445profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1446{
1447#ifndef PROFILE_BEFORE_PROLOGUE
1448  if (current_function_profile)
1449    profile_function (file);
1450#endif /* not PROFILE_BEFORE_PROLOGUE */
1451}
1452
1453static void
1454profile_function (FILE *file ATTRIBUTE_UNUSED)
1455{
1456#ifndef NO_PROFILE_COUNTERS
1457# define NO_PROFILE_COUNTERS	0
1458#endif
1459#if defined(ASM_OUTPUT_REG_PUSH)
1460  int sval = current_function_returns_struct;
1461  rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1462#if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1463  int cxt = cfun->static_chain_decl != NULL;
1464#endif
1465#endif /* ASM_OUTPUT_REG_PUSH */
1466
1467  if (! NO_PROFILE_COUNTERS)
1468    {
1469      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1470      switch_to_section (data_section);
1471      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1472      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1473      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1474    }
1475
1476  switch_to_section (current_function_section ());
1477
1478#if defined(ASM_OUTPUT_REG_PUSH)
1479  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1480    ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1481#endif
1482
1483#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1484  if (cxt)
1485    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1486#else
1487#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1488  if (cxt)
1489    {
1490      ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1491    }
1492#endif
1493#endif
1494
1495  FUNCTION_PROFILER (file, current_function_funcdef_no);
1496
1497#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1498  if (cxt)
1499    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1500#else
1501#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1502  if (cxt)
1503    {
1504      ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1505    }
1506#endif
1507#endif
1508
1509#if defined(ASM_OUTPUT_REG_PUSH)
1510  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1511    ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1512#endif
1513}
1514
1515/* Output assembler code for the end of a function.
1516   For clarity, args are same as those of `final_start_function'
1517   even though not all of them are needed.  */
1518
1519void
1520final_end_function (void)
1521{
1522  app_disable ();
1523
1524  (*debug_hooks->end_function) (high_function_linenum);
1525
1526  /* Finally, output the function epilogue:
1527     code to restore the stack frame and return to the caller.  */
1528  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1529
1530  /* And debug output.  */
1531  (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1532
1533#if defined (DWARF2_UNWIND_INFO)
1534  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1535      && dwarf2out_do_frame ())
1536    dwarf2out_end_epilogue (last_linenum, last_filename);
1537#endif
1538}
1539
1540/* Output assembler code for some insns: all or part of a function.
1541   For description of args, see `final_start_function', above.  */
1542
1543void
1544final (rtx first, FILE *file, int optimize)
1545{
1546  rtx insn;
1547  int max_uid = 0;
1548  int seen = 0;
1549
1550  last_ignored_compare = 0;
1551
1552#ifdef SDB_DEBUGGING_INFO
1553  /* When producing SDB debugging info, delete troublesome line number
1554     notes from inlined functions in other files as well as duplicate
1555     line number notes.  */
1556  if (write_symbols == SDB_DEBUG)
1557    {
1558      rtx last = 0;
1559      for (insn = first; insn; insn = NEXT_INSN (insn))
1560	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
1561	  {
1562	    if (last != 0
1563#ifdef USE_MAPPED_LOCATION
1564		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last)
1565#else
1566		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1567		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)
1568#endif
1569	      )
1570	      {
1571		delete_insn (insn);	/* Use delete_note.  */
1572		continue;
1573	      }
1574	    last = insn;
1575	  }
1576    }
1577#endif
1578
1579  for (insn = first; insn; insn = NEXT_INSN (insn))
1580    {
1581      if (INSN_UID (insn) > max_uid)       /* Find largest UID.  */
1582	max_uid = INSN_UID (insn);
1583#ifdef HAVE_cc0
      /* If CC tracking across branches is enabled, record the insn which
	 jumps to each label that is reached from only one place.  */
1586      if (optimize && JUMP_P (insn))
1587	{
1588	  rtx lab = JUMP_LABEL (insn);
1589	  if (lab && LABEL_NUSES (lab) == 1)
1590	    {
1591	      LABEL_REFS (lab) = insn;
1592	    }
1593	}
1594#endif
1595    }
1596
1597  init_recog ();
1598
1599  CC_STATUS_INIT;
1600
1601  /* Output the insns.  */
1602  for (insn = NEXT_INSN (first); insn;)
1603    {
1604#ifdef HAVE_ATTR_length
1605      if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1606	{
1607	  /* This can be triggered by bugs elsewhere in the compiler if
1608	     new insns are created after init_insn_lengths is called.  */
1609	  gcc_assert (NOTE_P (insn));
1610	  insn_current_address = -1;
1611	}
1612      else
1613	insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1614#endif /* HAVE_ATTR_length */
1615
1616      insn = final_scan_insn (insn, file, optimize, 0, &seen);
1617    }
1618}
1619
1620const char *
1621get_insn_template (int code, rtx insn)
1622{
1623  switch (insn_data[code].output_format)
1624    {
1625    case INSN_OUTPUT_FORMAT_SINGLE:
1626      return insn_data[code].output.single;
1627    case INSN_OUTPUT_FORMAT_MULTI:
1628      return insn_data[code].output.multi[which_alternative];
1629    case INSN_OUTPUT_FORMAT_FUNCTION:
1630      gcc_assert (insn);
1631      return (*insn_data[code].output.function) (recog_data.operand, insn);
1632
1633    default:
1634      gcc_unreachable ();
1635    }
1636}
1637
1638/* Emit the appropriate declaration for an alternate-entry-point
1639   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
1640   LABEL_KIND != LABEL_NORMAL.
1641
1642   The case fall-through in this function is intentional.  */
1643static void
1644output_alternate_entry_point (FILE *file, rtx insn)
1645{
1646  const char *name = LABEL_NAME (insn);
1647
1648  switch (LABEL_KIND (insn))
1649    {
1650    case LABEL_WEAK_ENTRY:
1651#ifdef ASM_WEAKEN_LABEL
1652      ASM_WEAKEN_LABEL (file, name);
1653#endif
1654    case LABEL_GLOBAL_ENTRY:
1655      targetm.asm_out.globalize_label (file, name);
1656    case LABEL_STATIC_ENTRY:
1657#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1658      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1659#endif
1660      ASM_OUTPUT_LABEL (file, name);
1661      break;
1662
1663    case LABEL_NORMAL:
1664    default:
1665      gcc_unreachable ();
1666    }
1667}
1668
1669/* The final scan for one insn, INSN.
1670   Args are same as in `final', except that INSN
1671   is the insn being scanned.
1672   Value returned is the next insn to be scanned.
1673
1674   NOPEEPHOLES is the flag to disallow peephole processing (currently
1675   used for within delayed branch sequence output).
1676
1677   SEEN is used to track the end of the prologue, for emitting
1678   debug information.  We force the emission of a line note after
1679   both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1680   at the beginning of the second basic block, whichever comes
1681   first.  */
1682
1683rtx
1684final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1685		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1686{
1687#ifdef HAVE_cc0
1688  rtx set;
1689#endif
1690  rtx next;
1691
1692  insn_counter++;
1693
1694  /* Ignore deleted insns.  These can occur when we split insns (due to a
1695     template of "#") while not optimizing.  */
1696  if (INSN_DELETED_P (insn))
1697    return NEXT_INSN (insn);
1698
1699  switch (GET_CODE (insn))
1700    {
1701    case NOTE:
1702      switch (NOTE_LINE_NUMBER (insn))
1703	{
1704	case NOTE_INSN_DELETED:
1705	case NOTE_INSN_FUNCTION_END:
1706	case NOTE_INSN_REPEATED_LINE_NUMBER:
1707	case NOTE_INSN_EXPECTED_VALUE:
1708	  break;
1709
1710	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1711	  in_cold_section_p = !in_cold_section_p;
1712	  (*debug_hooks->switch_text_section) ();
1713	  switch_to_section (current_function_section ());
1714	  break;
1715
1716	case NOTE_INSN_BASIC_BLOCK:
1717#ifdef TARGET_UNWIND_INFO
1718	  targetm.asm_out.unwind_emit (asm_out_file, insn);
1719#endif
1720
1721	  if (flag_debug_asm)
1722	    fprintf (asm_out_file, "\t%s basic block %d\n",
1723		     ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1724
1725	  if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1726	    {
1727	      *seen |= SEEN_EMITTED;
1728	      force_source_line = true;
1729	    }
1730	  else
1731	    *seen |= SEEN_BB;
1732
1733	  break;
1734
1735	case NOTE_INSN_EH_REGION_BEG:
1736	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1737				  NOTE_EH_HANDLER (insn));
1738	  break;
1739
1740	case NOTE_INSN_EH_REGION_END:
1741	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1742				  NOTE_EH_HANDLER (insn));
1743	  break;
1744
1745	case NOTE_INSN_PROLOGUE_END:
1746	  targetm.asm_out.function_end_prologue (file);
1747	  profile_after_prologue (file);
1748
1749	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1750	    {
1751	      *seen |= SEEN_EMITTED;
1752	      force_source_line = true;
1753	    }
1754	  else
1755	    *seen |= SEEN_NOTE;
1756
1757	  break;
1758
1759	case NOTE_INSN_EPILOGUE_BEG:
1760	  targetm.asm_out.function_begin_epilogue (file);
1761	  break;
1762
1763	case NOTE_INSN_FUNCTION_BEG:
1764	  app_disable ();
1765	  (*debug_hooks->end_prologue) (last_linenum, last_filename);
1766
1767	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1768	    {
1769	      *seen |= SEEN_EMITTED;
1770	      force_source_line = true;
1771	    }
1772	  else
1773	    *seen |= SEEN_NOTE;
1774
1775	  break;
1776
1777	case NOTE_INSN_BLOCK_BEG:
1778	  if (debug_info_level == DINFO_LEVEL_NORMAL
1779	      || debug_info_level == DINFO_LEVEL_VERBOSE
1780	      || write_symbols == DWARF2_DEBUG
1781	      || write_symbols == VMS_AND_DWARF2_DEBUG
1782	      || write_symbols == VMS_DEBUG)
1783	    {
1784	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1785
1786	      app_disable ();
1787	      ++block_depth;
1788	      high_block_linenum = last_linenum;
1789
1790	      /* Output debugging info about the symbol-block beginning.  */
1791	      (*debug_hooks->begin_block) (last_linenum, n);
1792
1793	      /* Mark this block as output.  */
1794	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1795	    }
1796	  break;
1797
1798	case NOTE_INSN_BLOCK_END:
1799	  if (debug_info_level == DINFO_LEVEL_NORMAL
1800	      || debug_info_level == DINFO_LEVEL_VERBOSE
1801	      || write_symbols == DWARF2_DEBUG
1802	      || write_symbols == VMS_AND_DWARF2_DEBUG
1803	      || write_symbols == VMS_DEBUG)
1804	    {
1805	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1806
1807	      app_disable ();
1808
1809	      /* End of a symbol-block.  */
1810	      --block_depth;
1811	      gcc_assert (block_depth >= 0);
1812
1813	      (*debug_hooks->end_block) (high_block_linenum, n);
1814	    }
1815	  break;
1816
1817	case NOTE_INSN_DELETED_LABEL:
1818	  /* Emit the label.  We may have deleted the CODE_LABEL because
1819	     the label could be proved to be unreachable, though still
1820	     referenced (in the form of having its address taken).  */
1821	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1822	  break;
1823
1824	case NOTE_INSN_VAR_LOCATION:
1825	  (*debug_hooks->var_location) (insn);
1826	  break;
1827
1828	case 0:
1829	  break;
1830
1831	default:
1832	  gcc_assert (NOTE_LINE_NUMBER (insn) > 0);
1833	  break;
1834	}
1835      break;
1836
1837    case BARRIER:
1838#if defined (DWARF2_UNWIND_INFO)
1839      if (dwarf2out_do_frame ())
1840	dwarf2out_frame_debug (insn, false);
1841#endif
1842      break;
1843
1844    case CODE_LABEL:
1845      /* The target port might emit labels in the output function for
1846	 some insn, e.g. sh.c output_branchy_insn.  */
1847/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
1848      {
1849	int align = LABEL_ALIGN_LOG (insn);
1850#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1851	int max_skip = LABEL_MAX_SKIP (insn);
1852#endif
1853
1854	if (align && NEXT_INSN (insn))
1855	  {
1856#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1857	    ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1858#else
1859#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1860	    ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1861#else
1862	    ASM_OUTPUT_ALIGN (file, align);
1863#endif
1864#endif
1865	  }
1866      }
1867/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
1868#ifdef HAVE_cc0
1869      CC_STATUS_INIT;
1870      /* If this label is reached from only one place, set the condition
1871	 codes from the instruction just before the branch.  */
1872
1873      /* Disabled because some insns set cc_status in the C output code
1874	 and NOTICE_UPDATE_CC alone can set incorrect status.  */
1875      if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1876	{
1877	  rtx jump = LABEL_REFS (insn);
1878	  rtx barrier = prev_nonnote_insn (insn);
1879	  rtx prev;
1880	  /* If the LABEL_REFS field of this label has been set to point
1881	     at a branch, the predecessor of the branch is a regular
1882	     insn, and that branch is the only way to reach this label,
1883	     set the condition codes based on the branch and its
1884	     predecessor.  */
1885	  if (barrier && BARRIER_P (barrier)
1886	      && jump && JUMP_P (jump)
1887	      && (prev = prev_nonnote_insn (jump))
1888	      && NONJUMP_INSN_P (prev))
1889	    {
1890	      NOTICE_UPDATE_CC (PATTERN (prev), prev);
1891	      NOTICE_UPDATE_CC (PATTERN (jump), jump);
1892	    }
1893	}
1894#endif
1895
1896      if (LABEL_NAME (insn))
1897	(*debug_hooks->label) (insn);
1898
1899      if (app_on)
1900	{
1901	  fputs (ASM_APP_OFF, file);
1902	  app_on = 0;
1903	}
1904
1905      next = next_nonnote_insn (insn);
1906      if (next != 0 && JUMP_P (next))
1907	{
1908	  rtx nextbody = PATTERN (next);
1909
1910	  /* If this label is followed by a jump-table,
1911	     make sure we put the label in the read-only section.  Also
1912	     possibly write the label and jump table together.  */
1913
1914	  if (GET_CODE (nextbody) == ADDR_VEC
1915	      || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1916	    {
1917#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1918	      /* In this case, the case vector is being moved by the
1919		 target, so don't output the label at all.  Leave that
1920		 to the back end macros.  */
1921#else
1922	      if (! JUMP_TABLES_IN_TEXT_SECTION)
1923		{
1924		  int log_align;
1925
1926		  switch_to_section (targetm.asm_out.function_rodata_section
1927				     (current_function_decl));
1928
1929#ifdef ADDR_VEC_ALIGN
1930		  log_align = ADDR_VEC_ALIGN (next);
1931#else
1932		  log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1933#endif
1934		  ASM_OUTPUT_ALIGN (file, log_align);
1935		}
1936	      else
1937		switch_to_section (current_function_section ());
1938
1939#ifdef ASM_OUTPUT_CASE_LABEL
1940	      ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1941				     next);
1942#else
1943	      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1944#endif
1945#endif
1946	      break;
1947	    }
1948	}
1949      if (LABEL_ALT_ENTRY_P (insn))
1950	output_alternate_entry_point (file, insn);
1951      else
1952	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1953      break;
1954
1955    default:
1956      {
1957	rtx body = PATTERN (insn);
1958	int insn_code_number;
1959	const char *template;
1960
1961#ifdef HAVE_conditional_execution
1962	/* Reset this early so it is correct for ASM statements.  */
1963	current_insn_predicate = NULL_RTX;
1964#endif
1965	/* An INSN, JUMP_INSN or CALL_INSN.
1966	   First check for special kinds that recog doesn't recognize.  */
1967
1968	if (GET_CODE (body) == USE /* These are just declarations.  */
1969	    || GET_CODE (body) == CLOBBER)
1970	  break;
1971
1972#ifdef HAVE_cc0
1973	{
1974	  /* If there is a REG_CC_SETTER note on this insn, it means that
1975	     the setting of the condition code was done in the delay slot
1976	     of the insn that branched here.  So recover the cc status
1977	     from the insn that set it.  */
1978
1979	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1980	  if (note)
1981	    {
1982	      NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1983	      cc_prev_status = cc_status;
1984	    }
1985	}
1986#endif
1987
1988	/* Detect insns that are really jump-tables
1989	   and output them as such.  */
1990
1991	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1992	  {
1993#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1994	    int vlen, idx;
1995#endif
1996
1997	    if (! JUMP_TABLES_IN_TEXT_SECTION)
1998	      switch_to_section (targetm.asm_out.function_rodata_section
1999				 (current_function_decl));
2000	    else
2001	      switch_to_section (current_function_section ());
2002
2003	    if (app_on)
2004	      {
2005		fputs (ASM_APP_OFF, file);
2006		app_on = 0;
2007	      }
2008
2009#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2010	    if (GET_CODE (body) == ADDR_VEC)
2011	      {
2012#ifdef ASM_OUTPUT_ADDR_VEC
2013		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2014#else
2015		gcc_unreachable ();
2016#endif
2017	      }
2018	    else
2019	      {
2020#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2021		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2022#else
2023		gcc_unreachable ();
2024#endif
2025	      }
2026#else
2027	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2028	    for (idx = 0; idx < vlen; idx++)
2029	      {
2030		if (GET_CODE (body) == ADDR_VEC)
2031		  {
2032#ifdef ASM_OUTPUT_ADDR_VEC_ELT
2033		    ASM_OUTPUT_ADDR_VEC_ELT
2034		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2035#else
2036		    gcc_unreachable ();
2037#endif
2038		  }
2039		else
2040		  {
2041#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2042		    ASM_OUTPUT_ADDR_DIFF_ELT
2043		      (file,
2044		       body,
2045		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2046		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2047#else
2048		    gcc_unreachable ();
2049#endif
2050		  }
2051	      }
2052#ifdef ASM_OUTPUT_CASE_END
2053	    ASM_OUTPUT_CASE_END (file,
2054				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2055				 insn);
2056#endif
2057#endif
2058
2059	    switch_to_section (current_function_section ());
2060
2061	    break;
2062	  }
2063	/* Output this line note if it is the first or the last line
2064	   note in a row.  */
2065	if (notice_source_line (insn))
2066	  {
2067	    (*debug_hooks->source_line) (last_linenum, last_filename);
2068	  }
2069
2070	if (GET_CODE (body) == ASM_INPUT)
2071	  {
2072	    const char *string = XSTR (body, 0);
2073
2074	    /* There's no telling what that did to the condition codes.  */
2075	    CC_STATUS_INIT;
2076
2077	    if (string[0])
2078	      {
2079		if (! app_on)
2080		  {
2081		    fputs (ASM_APP_ON, file);
2082		    app_on = 1;
2083		  }
2084		fprintf (asm_out_file, "\t%s\n", string);
2085	      }
2086	    break;
2087	  }
2088
2089	/* Detect `asm' construct with operands.  */
2090	if (asm_noperands (body) >= 0)
2091	  {
2092	    unsigned int noperands = asm_noperands (body);
2093	    rtx *ops = alloca (noperands * sizeof (rtx));
2094	    const char *string;
2095
2096	    /* There's no telling what that did to the condition codes.  */
2097	    CC_STATUS_INIT;
2098
2099	    /* Get out the operand values.  */
2100	    string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2101	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2102	    insn_noperands = noperands;
2103	    this_is_asm_operands = insn;
2104
2105#ifdef FINAL_PRESCAN_INSN
2106	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2107#endif
2108
2109	    /* Output the insn using them.  */
2110	    if (string[0])
2111	      {
2112		if (! app_on)
2113		  {
2114		    fputs (ASM_APP_ON, file);
2115		    app_on = 1;
2116		  }
2117	        output_asm_insn (string, ops);
2118	      }
2119
2120	    this_is_asm_operands = 0;
2121	    break;
2122	  }
2123
2124	if (app_on)
2125	  {
2126	    fputs (ASM_APP_OFF, file);
2127	    app_on = 0;
2128	  }
2129
2130	if (GET_CODE (body) == SEQUENCE)
2131	  {
2132	    /* A delayed-branch sequence */
2133	    int i;
2134
2135	    final_sequence = body;
2136
2137	    /* Record the delay slots' frame information before the branch.
2138	       This is needed for delayed calls: see execute_cfa_program().  */
2139#if defined (DWARF2_UNWIND_INFO)
2140	    if (dwarf2out_do_frame ())
2141	      for (i = 1; i < XVECLEN (body, 0); i++)
2142		dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2143#endif
2144
2145	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2146	       force the restoration of a comparison that was previously
2147	       thought unnecessary.  If that happens, cancel this sequence
2148	       and cause that insn to be restored.  */
2149
2150	    next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2151	    if (next != XVECEXP (body, 0, 1))
2152	      {
2153		final_sequence = 0;
2154		return next;
2155	      }
2156
2157	    for (i = 1; i < XVECLEN (body, 0); i++)
2158	      {
2159		rtx insn = XVECEXP (body, 0, i);
2160		rtx next = NEXT_INSN (insn);
2161		/* We loop in case any instruction in a delay slot gets
2162		   split.  */
2163		do
2164		  insn = final_scan_insn (insn, file, 0, 1, seen);
2165		while (insn != next);
2166	      }
2167#ifdef DBR_OUTPUT_SEQEND
2168	    DBR_OUTPUT_SEQEND (file);
2169#endif
2170	    final_sequence = 0;
2171
2172	    /* If the insn requiring the delay slot was a CALL_INSN, the
2173	       insns in the delay slot are actually executed before the
2174	       called function.  Hence we don't preserve any CC-setting
2175	       actions in these insns and the CC must be marked as being
2176	       clobbered by the function.  */
2177	    if (CALL_P (XVECEXP (body, 0, 0)))
2178	      {
2179		CC_STATUS_INIT;
2180	      }
2181	    break;
2182	  }
2183
2184	/* We have a real machine instruction as rtl.  */
2185
2186	body = PATTERN (insn);
2187
2188#ifdef HAVE_cc0
2189	set = single_set (insn);
2190
2191	/* Check for redundant test and compare instructions
2192	   (when the condition codes are already set up as desired).
2193	   This is done only when optimizing; if not optimizing,
2194	   it should be possible for the user to alter a variable
2195	   with the debugger in between statements
2196	   and the next statement should reexamine the variable
2197	   to compute the condition codes.  */
2198
2199	if (optimize)
2200	  {
2201	    if (set
2202		&& GET_CODE (SET_DEST (set)) == CC0
2203		&& insn != last_ignored_compare)
2204	      {
2205		if (GET_CODE (SET_SRC (set)) == SUBREG)
2206		  SET_SRC (set) = alter_subreg (&SET_SRC (set));
2207		else if (GET_CODE (SET_SRC (set)) == COMPARE)
2208		  {
2209		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2210		      XEXP (SET_SRC (set), 0)
2211			= alter_subreg (&XEXP (SET_SRC (set), 0));
2212		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2213		      XEXP (SET_SRC (set), 1)
2214			= alter_subreg (&XEXP (SET_SRC (set), 1));
2215		  }
2216		if ((cc_status.value1 != 0
2217		     && rtx_equal_p (SET_SRC (set), cc_status.value1))
2218		    || (cc_status.value2 != 0
2219			&& rtx_equal_p (SET_SRC (set), cc_status.value2)))
2220		  {
2221		    /* Don't delete insn if it has an addressing side-effect.  */
2222		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2223			/* or if anything in it is volatile.  */
2224			&& ! volatile_refs_p (PATTERN (insn)))
2225		      {
2226			/* We don't really delete the insn; just ignore it.  */
2227			last_ignored_compare = insn;
2228			break;
2229		      }
2230		  }
2231	      }
2232	  }
2233#endif
2234
2235#ifdef HAVE_cc0
2236	/* If this is a conditional branch, maybe modify it
2237	   if the cc's are in a nonstandard state
2238	   so that it accomplishes the same thing that it would
2239	   do straightforwardly if the cc's were set up normally.  */
2240
2241	if (cc_status.flags != 0
2242	    && JUMP_P (insn)
2243	    && GET_CODE (body) == SET
2244	    && SET_DEST (body) == pc_rtx
2245	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2246	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2247	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2248	  {
2249	    /* This function may alter the contents of its argument
2250	       and clear some of the cc_status.flags bits.
2251	       It may also return 1 meaning condition now always true
2252	       or -1 meaning condition now always false
2253	       or 2 meaning condition nontrivial but altered.  */
2254	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2255	    /* If condition now has fixed value, replace the IF_THEN_ELSE
2256	       with its then-operand or its else-operand.  */
2257	    if (result == 1)
2258	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
2259	    if (result == -1)
2260	      SET_SRC (body) = XEXP (SET_SRC (body), 2);
2261
2262	    /* The jump is now either unconditional or a no-op.
2263	       If it has become a no-op, don't try to output it.
2264	       (It would not be recognized.)  */
2265	    if (SET_SRC (body) == pc_rtx)
2266	      {
2267	        delete_insn (insn);
2268		break;
2269	      }
2270	    else if (GET_CODE (SET_SRC (body)) == RETURN)
2271	      /* Replace (set (pc) (return)) with (return).  */
2272	      PATTERN (insn) = body = SET_SRC (body);
2273
2274	    /* Rerecognize the instruction if it has changed.  */
2275	    if (result != 0)
2276	      INSN_CODE (insn) = -1;
2277	  }
2278
2279	/* Make same adjustments to instructions that examine the
2280	   condition codes without jumping and instructions that
2281	   handle conditional moves (if this machine has either one).  */
2282
2283	if (cc_status.flags != 0
2284	    && set != 0)
2285	  {
2286	    rtx cond_rtx, then_rtx, else_rtx;
2287
2288	    if (!JUMP_P (insn)
2289		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2290	      {
2291		cond_rtx = XEXP (SET_SRC (set), 0);
2292		then_rtx = XEXP (SET_SRC (set), 1);
2293		else_rtx = XEXP (SET_SRC (set), 2);
2294	      }
2295	    else
2296	      {
2297		cond_rtx = SET_SRC (set);
2298		then_rtx = const_true_rtx;
2299		else_rtx = const0_rtx;
2300	      }
2301
2302	    switch (GET_CODE (cond_rtx))
2303	      {
2304	      case GTU:
2305	      case GT:
2306	      case LTU:
2307	      case LT:
2308	      case GEU:
2309	      case GE:
2310	      case LEU:
2311	      case LE:
2312	      case EQ:
2313	      case NE:
2314		{
2315		  int result;
2316		  if (XEXP (cond_rtx, 0) != cc0_rtx)
2317		    break;
2318		  result = alter_cond (cond_rtx);
2319		  if (result == 1)
2320		    validate_change (insn, &SET_SRC (set), then_rtx, 0);
2321		  else if (result == -1)
2322		    validate_change (insn, &SET_SRC (set), else_rtx, 0);
2323		  else if (result == 2)
2324		    INSN_CODE (insn) = -1;
2325		  if (SET_DEST (set) == SET_SRC (set))
2326		    delete_insn (insn);
2327		}
2328		break;
2329
2330	      default:
2331		break;
2332	      }
2333	  }
2334
2335#endif
2336
2337#ifdef HAVE_peephole
2338	/* Do machine-specific peephole optimizations if desired.  */
2339
2340	if (optimize && !flag_no_peephole && !nopeepholes)
2341	  {
2342	    rtx next = peephole (insn);
2343	    /* When peepholing, if there were notes within the peephole,
2344	       emit them before the peephole.  */
2345	    if (next != 0 && next != NEXT_INSN (insn))
2346	      {
2347		rtx note, prev = PREV_INSN (insn);
2348
2349		for (note = NEXT_INSN (insn); note != next;
2350		     note = NEXT_INSN (note))
2351		  final_scan_insn (note, file, optimize, nopeepholes, seen);
2352
2353		/* Put the notes in the proper position for a later
2354		   rescan.  For example, the SH target can do this
2355		   when generating a far jump in a delayed branch
2356		   sequence.  */
2357		note = NEXT_INSN (insn);
2358		PREV_INSN (note) = prev;
2359		NEXT_INSN (prev) = note;
2360		NEXT_INSN (PREV_INSN (next)) = insn;
2361		PREV_INSN (insn) = PREV_INSN (next);
2362		NEXT_INSN (insn) = next;
2363		PREV_INSN (next) = insn;
2364	      }
2365
2366	    /* PEEPHOLE might have changed this.  */
2367	    body = PATTERN (insn);
2368	  }
2369#endif
2370
2371	/* Try to recognize the instruction.
2372	   If successful, verify that the operands satisfy the
2373	   constraints for the instruction.  Crash if they don't,
2374	   since `reload' should have changed them so that they do.  */
2375
2376	insn_code_number = recog_memoized (insn);
2377	cleanup_subreg_operands (insn);
2378
2379	/* Dump the insn in the assembly for debugging.  */
2380	if (flag_dump_rtl_in_asm)
2381	  {
2382	    print_rtx_head = ASM_COMMENT_START;
2383	    print_rtl_single (asm_out_file, insn);
2384	    print_rtx_head = "";
2385	  }
2386
2387	if (! constrain_operands_cached (1))
2388	  fatal_insn_not_found (insn);
2389
2390	/* Some target machines need to prescan each insn before
2391	   it is output.  */
2392
2393#ifdef FINAL_PRESCAN_INSN
2394	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2395#endif
2396
2397#ifdef HAVE_conditional_execution
2398	if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2399	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2400#endif
2401
2402#ifdef HAVE_cc0
2403	cc_prev_status = cc_status;
2404
2405	/* Update `cc_status' for this instruction.
2406	   The instruction's output routine may change it further.
2407	   If the output routine for a jump insn needs to depend
2408	   on the cc status, it should look at cc_prev_status.  */
2409
2410	NOTICE_UPDATE_CC (body, insn);
2411#endif
2412
2413	current_output_insn = debug_insn = insn;
2414
2415#if defined (DWARF2_UNWIND_INFO)
2416	if (CALL_P (insn) && dwarf2out_do_frame ())
2417	  dwarf2out_frame_debug (insn, false);
2418#endif
2419
2420	/* Find the proper template for this insn.  */
2421	template = get_insn_template (insn_code_number, insn);
2422
2423	/* If the C code returns 0, it means that it is a jump insn
2424	   which follows a deleted test insn, and that test insn
2425	   needs to be reinserted.  */
2426	if (template == 0)
2427	  {
2428	    rtx prev;
2429
2430	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2431
2432	    /* We have already processed the notes between the setter and
2433	       the user.  Make sure we don't process them again, this is
2434	       particularly important if one of the notes is a block
2435	       scope note or an EH note.  */
2436	    for (prev = insn;
2437		 prev != last_ignored_compare;
2438		 prev = PREV_INSN (prev))
2439	      {
2440		if (NOTE_P (prev))
2441		  delete_insn (prev);	/* Use delete_note.  */
2442	      }
2443
2444	    return prev;
2445	  }
2446
2447	/* If the template is the string "#", it means that this insn must
2448	   be split.  */
2449	if (template[0] == '#' && template[1] == '\0')
2450	  {
2451	    rtx new = try_split (body, insn, 0);
2452
2453	    /* If we didn't split the insn, go away.  */
2454	    if (new == insn && PATTERN (new) == body)
2455	      fatal_insn ("could not split insn", insn);
2456
2457#ifdef HAVE_ATTR_length
2458	    /* This instruction should have been split in shorten_branches,
2459	       to ensure that we would have valid length info for the
2460	       resulting split insns.  */
2461	    gcc_unreachable ();
2462#endif
2463
2464	    return new;
2465	  }
2466
2467#ifdef TARGET_UNWIND_INFO
2468	/* ??? This will put the directives in the wrong place if
2469	   get_insn_template outputs assembly directly.  However, calling it
2470	   before get_insn_template breaks if the insn is split.  */
2471	targetm.asm_out.unwind_emit (asm_out_file, insn);
2472#endif
2473
2474	/* Output assembler code from the template.  */
2475	output_asm_insn (template, recog_data.operand);
2476
2477	/* If necessary, report the effect that the instruction has on
2478	   the unwind info.   We've already done this for delay slots
2479	   and call instructions.  */
2480#if defined (DWARF2_UNWIND_INFO)
2481	if (final_sequence == 0
2482#if !defined (HAVE_prologue)
2483	    && !ACCUMULATE_OUTGOING_ARGS
2484#endif
2485	    && dwarf2out_do_frame ())
2486	  dwarf2out_frame_debug (insn, true);
2487#endif
2488
2489	current_output_insn = debug_insn = 0;
2490      }
2491    }
2492  return NEXT_INSN (insn);
2493}
2494
2495/* Return whether a source line note needs to be emitted before INSN.  */
2496
2497static bool
2498notice_source_line (rtx insn)
2499{
2500  const char *filename = insn_file (insn);
2501  int linenum = insn_line (insn);
2502
2503  if (filename
2504      && (force_source_line
2505	  || filename != last_filename
2506	  || last_linenum != linenum))
2507    {
2508      force_source_line = false;
2509      last_filename = filename;
2510      last_linenum = linenum;
2511      high_block_linenum = MAX (last_linenum, high_block_linenum);
2512      high_function_linenum = MAX (last_linenum, high_function_linenum);
2513      return true;
2514    }
2515  return false;
2516}
2517
2518/* For each operand in INSN, simplify (subreg (reg)) so that it refers
2519   directly to the desired hard register.  */
2520
2521void
2522cleanup_subreg_operands (rtx insn)
2523{
2524  int i;
2525  extract_insn_cached (insn);
2526  for (i = 0; i < recog_data.n_operands; i++)
2527    {
2528      /* The following test cannot use recog_data.operand when testing
2529	 for a SUBREG: the underlying object might have been changed
2530	 already if we are inside a match_operator expression that
2531	 matches the else clause.  Instead we test the underlying
2532	 expression directly.  */
2533      if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2534	recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2535      else if (GET_CODE (recog_data.operand[i]) == PLUS
2536	       || GET_CODE (recog_data.operand[i]) == MULT
2537	       || MEM_P (recog_data.operand[i]))
2538	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2539    }
2540
2541  for (i = 0; i < recog_data.n_dups; i++)
2542    {
2543      if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2544	*recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2545      else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2546	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
2547	       || MEM_P (*recog_data.dup_loc[i]))
2548	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2549    }
2550}
2551
2552/* If X is a SUBREG, replace it with a REG or a MEM,
2553   based on the thing it is a subreg of.  */
2554
2555rtx
2556alter_subreg (rtx *xp)
2557{
2558  rtx x = *xp;
2559  rtx y = SUBREG_REG (x);
2560
2561  /* simplify_subreg does not remove subreg from volatile references.
2562     We are required to.  */
2563  if (MEM_P (y))
2564    {
2565      int offset = SUBREG_BYTE (x);
2566
2567      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2568	 contains 0 instead of the proper offset.  See simplify_subreg.  */
2569      if (offset == 0
2570	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2571        {
2572          int difference = GET_MODE_SIZE (GET_MODE (y))
2573			   - GET_MODE_SIZE (GET_MODE (x));
2574          if (WORDS_BIG_ENDIAN)
2575            offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2576          if (BYTES_BIG_ENDIAN)
2577            offset += difference % UNITS_PER_WORD;
2578        }
2579
2580      *xp = adjust_address (y, GET_MODE (x), offset);
2581    }
2582  else
2583    {
2584      rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2585				 SUBREG_BYTE (x));
2586
2587      if (new != 0)
2588	*xp = new;
2589      else if (REG_P (y))
2590	{
2591	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
2592	  unsigned int regno = subreg_regno (x);
2593	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2594	}
2595    }
2596
2597  return *xp;
2598}
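/* Worked example (illustrative, assuming 32-bit words and little-endian
   word order): (subreg:SI (reg:DI 2) 4) becomes (reg:SI 3), the hard
   register holding the high-order word, while
   (subreg:SI (mem:DI (reg:SI 5)) 4) becomes
   (mem:SI (plus:SI (reg:SI 5) (const_int 4))) via adjust_address.  */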
2599
2600/* Do alter_subreg on all the SUBREGs contained in X.  */
2601
2602static rtx
2603walk_alter_subreg (rtx *xp)
2604{
2605  rtx x = *xp;
2606  switch (GET_CODE (x))
2607    {
2608    case PLUS:
2609    case MULT:
2610    case AND:
2611      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2612      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2613      break;
2614
2615    case MEM:
2616    case ZERO_EXTEND:
2617      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2618      break;
2619
2620    case SUBREG:
2621      return alter_subreg (xp);
2622
2623    default:
2624      break;
2625    }
2626
2627  return *xp;
2628}
2629
2630#ifdef HAVE_cc0
2631
2632/* Given BODY, the body of a jump instruction, alter the jump condition
2633   as required by the bits that are set in cc_status.flags.
2634   Not all of the bits there can be handled at this level in all cases.
2635
2636   The value is normally 0.
2637   1 means that the condition has become always true.
2638   -1 means that the condition has become always false.
2639   2 means that COND has been altered.  */
2640
2641static int
2642alter_cond (rtx cond)
2643{
2644  int value = 0;
2645
2646  if (cc_status.flags & CC_REVERSED)
2647    {
2648      value = 2;
2649      PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2650    }
2651
2652  if (cc_status.flags & CC_INVERTED)
2653    {
2654      value = 2;
2655      PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2656    }
2657
2658  if (cc_status.flags & CC_NOT_POSITIVE)
2659    switch (GET_CODE (cond))
2660      {
2661      case LE:
2662      case LEU:
2663      case GEU:
2664	/* Jump becomes unconditional.  */
2665	return 1;
2666
2667      case GT:
2668      case GTU:
2669      case LTU:
2670	/* Jump becomes no-op.  */
2671	return -1;
2672
2673      case GE:
2674	PUT_CODE (cond, EQ);
2675	value = 2;
2676	break;
2677
2678      case LT:
2679	PUT_CODE (cond, NE);
2680	value = 2;
2681	break;
2682
2683      default:
2684	break;
2685      }
2686
2687  if (cc_status.flags & CC_NOT_NEGATIVE)
2688    switch (GET_CODE (cond))
2689      {
2690      case GE:
2691      case GEU:
2692	/* Jump becomes unconditional.  */
2693	return 1;
2694
2695      case LT:
2696      case LTU:
2697	/* Jump becomes no-op.  */
2698	return -1;
2699
2700      case LE:
2701      case LEU:
2702	PUT_CODE (cond, EQ);
2703	value = 2;
2704	break;
2705
2706      case GT:
2707      case GTU:
2708	PUT_CODE (cond, NE);
2709	value = 2;
2710	break;
2711
2712      default:
2713	break;
2714      }
2715
2716  if (cc_status.flags & CC_NO_OVERFLOW)
2717    switch (GET_CODE (cond))
2718      {
2719      case GEU:
2720	/* Jump becomes unconditional.  */
2721	return 1;
2722
2723      case LEU:
2724	PUT_CODE (cond, EQ);
2725	value = 2;
2726	break;
2727
2728      case GTU:
2729	PUT_CODE (cond, NE);
2730	value = 2;
2731	break;
2732
2733      case LTU:
2734	/* Jump becomes no-op.  */
2735	return -1;
2736
2737      default:
2738	break;
2739      }
2740
2741  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2742    switch (GET_CODE (cond))
2743      {
2744      default:
2745	gcc_unreachable ();
2746
2747      case NE:
2748	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2749	value = 2;
2750	break;
2751
2752      case EQ:
2753	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2754	value = 2;
2755	break;
2756      }
2757
2758  if (cc_status.flags & CC_NOT_SIGNED)
2759    /* The flags are valid if signed condition operators are converted
2760       to unsigned.  */
2761    switch (GET_CODE (cond))
2762      {
2763      case LE:
2764	PUT_CODE (cond, LEU);
2765	value = 2;
2766	break;
2767
2768      case LT:
2769	PUT_CODE (cond, LTU);
2770	value = 2;
2771	break;
2772
2773      case GT:
2774	PUT_CODE (cond, GTU);
2775	value = 2;
2776	break;
2777
2778      case GE:
2779	PUT_CODE (cond, GEU);
2780	value = 2;
2781	break;
2782
2783      default:
2784	break;
2785      }
2786
2787  return value;
2788}
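/* Worked example (illustrative): if the last cc-setting insn left
   CC_NO_OVERFLOW set, a condition (gtu (cc0) (const_int 0)) is rewritten
   to (ne (cc0) (const_int 0)) and alter_cond returns 2, so the caller
   re-recognizes the jump; a (geu ...) condition makes it return 1 and
   the branch becomes unconditional, while (ltu ...) returns -1 and the
   jump is deleted as a no-op.  */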
2789#endif
2790
2791/* Report inconsistency between the assembler template and the operands.
2792   In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
2793
2794void
2795output_operand_lossage (const char *cmsgid, ...)
2796{
2797  char *fmt_string;
2798  char *new_message;
2799  const char *pfx_str;
2800  va_list ap;
2801
2802  va_start (ap, cmsgid);
2803
2804  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
2805  asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
2806  vasprintf (&new_message, fmt_string, ap);
2807
2808  if (this_is_asm_operands)
2809    error_for_asm (this_is_asm_operands, "%s", new_message);
2810  else
2811    internal_error ("%s", new_message);
2812
2813  free (fmt_string);
2814  free (new_message);
2815  va_end (ap);
2816}
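/* Hypothetical usage (illustrative only): a target's PRINT_OPERAND
   implementation might report a bad modifier with

     output_operand_lossage ("invalid operand modifier '%%%c'", code);

   which is reported via error_for_asm when it comes from a user asm
   statement and as an internal compiler error otherwise.  */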
2817
2818/* Output of assembler code from a template, and its subroutines.  */
2819
2820/* Annotate the assembly with a comment describing the pattern and
2821   alternative used.  */
2822
2823static void
2824output_asm_name (void)
2825{
2826  if (debug_insn)
2827    {
2828      int num = INSN_CODE (debug_insn);
2829      fprintf (asm_out_file, "\t%s %d\t%s",
2830	       ASM_COMMENT_START, INSN_UID (debug_insn),
2831	       insn_data[num].name);
2832      if (insn_data[num].n_alternatives > 1)
2833	fprintf (asm_out_file, "/%d", which_alternative + 1);
2834#ifdef HAVE_ATTR_length
2835      fprintf (asm_out_file, "\t[length = %d]",
2836	       get_attr_length (debug_insn));
2837#endif
2838      /* Clear this so only the first assembler insn
2839	 of any rtl insn will get the special comment for -dp.  */
2840      debug_insn = 0;
2841    }
2842}
2843
2844/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2845   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
2846   corresponds to the address of the object and 0 if to the object.  */
2847
2848static tree
2849get_mem_expr_from_op (rtx op, int *paddressp)
2850{
2851  tree expr;
2852  int inner_addressp;
2853
2854  *paddressp = 0;
2855
2856  if (REG_P (op))
2857    return REG_EXPR (op);
2858  else if (!MEM_P (op))
2859    return 0;
2860
2861  if (MEM_EXPR (op) != 0)
2862    return MEM_EXPR (op);
2863
2864  /* Otherwise we have an address, so indicate it and look at the address.  */
2865  *paddressp = 1;
2866  op = XEXP (op, 0);
2867
2868  /* First check if we have a decl for the address, then look at the right side
2869     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
2870     But don't allow the address to itself be indirect.  */
2871  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2872    return expr;
2873  else if (GET_CODE (op) == PLUS
2874	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2875    return expr;
2876
2877  while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
2878	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
2879    op = XEXP (op, 0);
2880
2881  expr = get_mem_expr_from_op (op, &inner_addressp);
2882  return inner_addressp ? 0 : expr;
2883}
2884
2885/* Output operand names for assembler instructions.  OPERANDS is the
2886   operand vector, OPORDER is the order to write the operands, and NOPS
2887   is the number of operands to write.  */
2888
2889static void
2890output_asm_operand_names (rtx *operands, int *oporder, int nops)
2891{
2892  int wrote = 0;
2893  int i;
2894
2895  for (i = 0; i < nops; i++)
2896    {
2897      int addressp;
2898      rtx op = operands[oporder[i]];
2899      tree expr = get_mem_expr_from_op (op, &addressp);
2900
2901      fprintf (asm_out_file, "%c%s",
2902	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2903      wrote = 1;
2904      if (expr)
2905	{
2906	  fprintf (asm_out_file, "%s",
2907		   addressp ? "*" : "");
2908	  print_mem_expr (asm_out_file, expr);
2909	  wrote = 1;
2910	}
2911      else if (REG_P (op) && ORIGINAL_REGNO (op)
2912	       && ORIGINAL_REGNO (op) != REGNO (op))
2913	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2914    }
2915}
2916
2917/* Output text from TEMPLATE to the assembler output file,
2918   obeying %-directions to substitute operands taken from
2919   the vector OPERANDS.
2920
2921   %N (for N a digit) means print operand N in usual manner.
2922   %lN means require operand N to be a CODE_LABEL or LABEL_REF
2923      and print the label name with no punctuation.
2924   %cN means require operand N to be a constant
2925      and print the constant expression with no punctuation.
2926   %aN means expect operand N to be a memory address
2927      (not a memory reference!) and print a reference
2928      to that address.
2929   %nN means expect operand N to be a constant
2930      and print a constant expression for minus the value
2931      of the operand, with no other punctuation.  */
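/* Worked example (illustrative, AT&T-style syntax assumed): given the
   template "cmp\t%1, %0\n\tbeq\t%l2" and operands
   { (reg:SI 0 ax), (const_int 5), (label_ref 42) }, %0 and %1 are printed
   by the target's PRINT_OPERAND and %l2 prints the bare internal label
   name, producing roughly

	cmp	$5, %eax
	beq	.L42

   on the hypothetical target.  */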
2932
2933void
2934output_asm_insn (const char *template, rtx *operands)
2935{
2936  const char *p;
2937  int c;
2938#ifdef ASSEMBLER_DIALECT
2939  int dialect = 0;
2940#endif
2941  int oporder[MAX_RECOG_OPERANDS];
2942  char opoutput[MAX_RECOG_OPERANDS];
2943  int ops = 0;
2944
2945  /* An insn may return a null string template
2946     in a case where no assembler code is needed.  */
2947  if (*template == 0)
2948    return;
2949
2950  memset (opoutput, 0, sizeof opoutput);
2951  p = template;
2952  putc ('\t', asm_out_file);
2953
2954#ifdef ASM_OUTPUT_OPCODE
2955  ASM_OUTPUT_OPCODE (asm_out_file, p);
2956#endif
2957
2958  while ((c = *p++))
2959    switch (c)
2960      {
2961      case '\n':
2962	if (flag_verbose_asm)
2963	  output_asm_operand_names (operands, oporder, ops);
2964	if (flag_print_asm_name)
2965	  output_asm_name ();
2966
2967	ops = 0;
2968	memset (opoutput, 0, sizeof opoutput);
2969
2970	putc (c, asm_out_file);
2971#ifdef ASM_OUTPUT_OPCODE
2972	while ((c = *p) == '\t')
2973	  {
2974	    putc (c, asm_out_file);
2975	    p++;
2976	  }
2977	ASM_OUTPUT_OPCODE (asm_out_file, p);
2978#endif
2979	break;
2980
2981#ifdef ASSEMBLER_DIALECT
2982      case '{':
2983	{
2984	  int i;
2985
2986	  if (dialect)
2987	    output_operand_lossage ("nested assembly dialect alternatives");
2988	  else
2989	    dialect = 1;
2990
2991	  /* If we want the first dialect, do nothing.  Otherwise, skip
2992	     DIALECT_NUMBER of strings ending with '|'.  */
2993	  for (i = 0; i < dialect_number; i++)
2994	    {
2995	      while (*p && *p != '}' && *p++ != '|')
2996		;
2997	      if (*p == '}')
2998		break;
2999	      if (*p == '|')
3000		p++;
3001	    }
3002
3003	  if (*p == '\0')
3004	    output_operand_lossage ("unterminated assembly dialect alternative");
3005	}
3006	break;
3007
3008      case '|':
3009	if (dialect)
3010	  {
3011	    /* Skip to close brace.  */
3012	    do
3013	      {
3014		if (*p == '\0')
3015		  {
3016		    output_operand_lossage ("unterminated assembly dialect alternative");
3017		    break;
3018		  }
3019	      }
3020	    while (*p++ != '}');
3021	    dialect = 0;
3022	  }
3023	else
3024	  putc (c, asm_out_file);
3025	break;
3026
3027      case '}':
3028	if (! dialect)
3029	  putc (c, asm_out_file);
3030	dialect = 0;
3031	break;
3032#endif
3033
3034      case '%':
3035	/* %% outputs a single %.  */
3036	if (*p == '%')
3037	  {
3038	    p++;
3039	    putc (c, asm_out_file);
3040	  }
3041	/* %= outputs a number which is unique to each insn in the entire
3042	   compilation.  This is useful for making local labels that are
3043	   referred to more than once in a given insn.  */
3044	else if (*p == '=')
3045	  {
3046	    p++;
3047	    fprintf (asm_out_file, "%d", insn_counter);
3048	  }
3049	/* % followed by a letter and some digits
3050	   outputs an operand in a special way depending on the letter.
3051	   Letters `acln' are implemented directly.
3052	   Other letters are passed to `output_operand' so that
3053	   the PRINT_OPERAND macro can define them.  */
3054	else if (ISALPHA (*p))
3055	  {
3056	    int letter = *p++;
3057	    unsigned long opnum;
3058	    char *endptr;
3059
3060	    opnum = strtoul (p, &endptr, 10);
3061
3062	    if (endptr == p)
3063	      output_operand_lossage ("operand number missing "
3064				      "after %%-letter");
3065	    else if (this_is_asm_operands && opnum >= insn_noperands)
3066	      output_operand_lossage ("operand number out of range");
3067	    else if (letter == 'l')
3068	      output_asm_label (operands[opnum]);
3069	    else if (letter == 'a')
3070	      output_address (operands[opnum]);
3071	    else if (letter == 'c')
3072	      {
3073		if (CONSTANT_ADDRESS_P (operands[opnum]))
3074		  output_addr_const (asm_out_file, operands[opnum]);
3075		else
3076		  output_operand (operands[opnum], 'c');
3077	      }
3078	    else if (letter == 'n')
3079	      {
3080		if (GET_CODE (operands[opnum]) == CONST_INT)
3081		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3082			   - INTVAL (operands[opnum]));
3083		else
3084		  {
3085		    putc ('-', asm_out_file);
3086		    output_addr_const (asm_out_file, operands[opnum]);
3087		  }
3088	      }
3089	    else
3090	      output_operand (operands[opnum], letter);
3091
3092	    if (!opoutput[opnum])
3093	      oporder[ops++] = opnum;
3094	    opoutput[opnum] = 1;
3095
3096	    p = endptr;
3097	    c = *p;
3098	  }
3099	/* % followed by a digit outputs an operand the default way.  */
3100	else if (ISDIGIT (*p))
3101	  {
3102	    unsigned long opnum;
3103	    char *endptr;
3104
3105	    opnum = strtoul (p, &endptr, 10);
3106	    if (this_is_asm_operands && opnum >= insn_noperands)
3107	      output_operand_lossage ("operand number out of range");
3108	    else
3109	      output_operand (operands[opnum], 0);
3110
3111	    if (!opoutput[opnum])
3112	      oporder[ops++] = opnum;
3113	    opoutput[opnum] = 1;
3114
3115	    p = endptr;
3116	    c = *p;
3117	  }
3118	/* % followed by punctuation: output something for that
3119	   punctuation character alone, with no operand.
3120	   The PRINT_OPERAND macro decides what is actually done.  */
3121#ifdef PRINT_OPERAND_PUNCT_VALID_P
3122	else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3123	  output_operand (NULL_RTX, *p++);
3124#endif
3125	else
3126	  output_operand_lossage ("invalid %%-code");
3127	break;
3128
3129      default:
3130	putc (c, asm_out_file);
3131      }
3132
3133  /* Write out the variable names for operands, if we know them.  */
3134  if (flag_verbose_asm)
3135    output_asm_operand_names (operands, oporder, ops);
3136  if (flag_print_asm_name)
3137    output_asm_name ();
3138
3139  putc ('\n', asm_out_file);
3140}
3141
3142/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
3143
3144void
3145output_asm_label (rtx x)
3146{
3147  char buf[256];
3148
3149  if (GET_CODE (x) == LABEL_REF)
3150    x = XEXP (x, 0);
3151  if (LABEL_P (x)
3152      || (NOTE_P (x)
3153	  && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3154    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3155  else
3156    output_operand_lossage ("'%%l' operand isn't a label");
3157
3158  assemble_name (asm_out_file, buf);
3159}
3160
3161/* Print operand X using machine-dependent assembler syntax.
3162   The macro PRINT_OPERAND is defined just to control this function.
3163   CODE is a non-digit that preceded the operand-number in the % spec,
3164   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
3165   between the % and the digits.
3166   When CODE is a non-letter, X is 0.
3167
3168   The meanings of the letters are machine-dependent and controlled
3169   by PRINT_OPERAND.  */
3170
3171static void
3172output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3173{
3174  if (x && GET_CODE (x) == SUBREG)
3175    x = alter_subreg (&x);
3176
3177  /* X must not be a pseudo reg.  */
3178  gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3179
3180  PRINT_OPERAND (asm_out_file, x, code);
3181}
3182
3183/* Print a memory reference operand for address X
3184   using machine-dependent assembler syntax.
3185   The macro PRINT_OPERAND_ADDRESS exists just to control this function.  */
3186
3187void
3188output_address (rtx x)
3189{
3190  walk_alter_subreg (&x);
3191  PRINT_OPERAND_ADDRESS (asm_out_file, x);
3192}
3193
3194/* Print an integer constant expression in assembler syntax.
3195   Addition and subtraction are the only arithmetic
3196   that may appear in these expressions.  */
3197
3198void
3199output_addr_const (FILE *file, rtx x)
3200{
3201  char buf[256];
3202
3203 restart:
3204  switch (GET_CODE (x))
3205    {
3206    case PC:
3207      putc ('.', file);
3208      break;
3209
3210    case SYMBOL_REF:
3211      if (SYMBOL_REF_DECL (x))
3212	mark_decl_referenced (SYMBOL_REF_DECL (x));
3213#ifdef ASM_OUTPUT_SYMBOL_REF
3214      ASM_OUTPUT_SYMBOL_REF (file, x);
3215#else
3216      assemble_name (file, XSTR (x, 0));
3217#endif
3218      break;
3219
3220    case LABEL_REF:
3221      x = XEXP (x, 0);
3222      /* Fall through.  */
3223    case CODE_LABEL:
3224      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3225#ifdef ASM_OUTPUT_LABEL_REF
3226      ASM_OUTPUT_LABEL_REF (file, buf);
3227#else
3228      assemble_name (file, buf);
3229#endif
3230      break;
3231
3232    case CONST_INT:
3233      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3234      break;
3235
3236    case CONST:
3237      /* This used to output parentheses around the expression,
3238	 but that does not work on the 386 (either ATT or BSD assembler).  */
3239      output_addr_const (file, XEXP (x, 0));
3240      break;
3241
3242    case CONST_DOUBLE:
3243      if (GET_MODE (x) == VOIDmode)
3244	{
3245	  /* We can use %d if the number is one word and positive.  */
3246	  if (CONST_DOUBLE_HIGH (x))
3247	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3248		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3249	  else if (CONST_DOUBLE_LOW (x) < 0)
3250	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3251	  else
3252	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3253	}
3254      else
3255	/* We can't handle floating point constants;
3256	   PRINT_OPERAND must handle them.  */
3257	output_operand_lossage ("floating constant misused");
3258      break;
3259
3260    case PLUS:
3261      /* Some assemblers need integer constants to appear last (e.g. masm).  */
3262      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3263	{
3264	  output_addr_const (file, XEXP (x, 1));
3265	  if (INTVAL (XEXP (x, 0)) >= 0)
3266	    fprintf (file, "+");
3267	  output_addr_const (file, XEXP (x, 0));
3268	}
3269      else
3270	{
3271	  output_addr_const (file, XEXP (x, 0));
3272	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
3273	      || INTVAL (XEXP (x, 1)) >= 0)
3274	    fprintf (file, "+");
3275	  output_addr_const (file, XEXP (x, 1));
3276	}
3277      break;
3278
3279    case MINUS:
3280      /* Avoid outputting things like x-x or x+5-x,
3281	 since some assemblers can't handle that.  */
3282      x = simplify_subtraction (x);
3283      if (GET_CODE (x) != MINUS)
3284	goto restart;
3285
3286      output_addr_const (file, XEXP (x, 0));
3287      fprintf (file, "-");
3288      if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3289	  || GET_CODE (XEXP (x, 1)) == PC
3290	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3291	output_addr_const (file, XEXP (x, 1));
3292      else
3293	{
3294	  fputs (targetm.asm_out.open_paren, file);
3295	  output_addr_const (file, XEXP (x, 1));
3296	  fputs (targetm.asm_out.close_paren, file);
3297	}
3298      break;
3299
3300    case ZERO_EXTEND:
3301    case SIGN_EXTEND:
3302    case SUBREG:
3303      output_addr_const (file, XEXP (x, 0));
3304      break;
3305
3306    default:
3307#ifdef OUTPUT_ADDR_CONST_EXTRA
3308      OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3309      break;
3310
3311    fail:
3312#endif
3313      output_operand_lossage ("invalid expression as operand");
3314    }
3315}
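/* Worked examples (illustrative): (const (plus (symbol_ref ("foo"))
   (const_int 8))) prints as "foo+8", and a negative addend prints as
   "foo-4" because the "+" is suppressed for negative CONST_INTs.  In a
   MINUS the second operand is wrapped in the target's open/close paren
   strings unless it is a SYMBOL_REF, PC or nonnegative CONST_INT, so a
   label difference prints roughly as ".L7-(.L3)" on an ELF-style
   target.  */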
3316
3317/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3318   %R prints the value of REGISTER_PREFIX.
3319   %L prints the value of LOCAL_LABEL_PREFIX.
3320   %U prints the value of USER_LABEL_PREFIX.
3321   %I prints the value of IMMEDIATE_PREFIX.
3322   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3323   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3324
3325   We handle alternate assembler dialects here, just like output_asm_insn.  */
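/* Hypothetical usage (illustrative only; the mnemonics and names are made
   up): a target's prologue output might contain

     asm_fprintf (file, "\tsub\t%Rsp, %Rsp, #%wd\n", (HOST_WIDE_INT) size);
     asm_fprintf (file, "\tcall\t%U%s\n", "mcount");

   which, with an empty REGISTER_PREFIX and user_label_prefix "_", writes
   "sub sp, sp, #<size>" and "call _mcount".  */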
3326
3327void
3328asm_fprintf (FILE *file, const char *p, ...)
3329{
3330  char buf[10];
3331  char *q, c;
3332  va_list argptr;
3333
3334  va_start (argptr, p);
3335
3336  buf[0] = '%';
3337
3338  while ((c = *p++))
3339    switch (c)
3340      {
3341#ifdef ASSEMBLER_DIALECT
3342      case '{':
3343	{
3344	  int i;
3345
3346	  /* If we want the first dialect, do nothing.  Otherwise, skip
3347	     DIALECT_NUMBER of strings ending with '|'.  */
3348	  for (i = 0; i < dialect_number; i++)
3349	    {
3350	      while (*p && *p++ != '|')
3351		;
3352
3353	      if (*p == '|')
3354		p++;
3355	    }
3356	}
3357	break;
3358
3359      case '|':
3360	/* Skip to close brace.  */
3361	while (*p && *p++ != '}')
3362	  ;
3363	break;
3364
3365      case '}':
3366	break;
3367#endif
3368
3369      case '%':
3370	c = *p++;
3371	q = &buf[1];
3372	while (strchr ("-+ #0", c))
3373	  {
3374	    *q++ = c;
3375	    c = *p++;
3376	  }
3377	while (ISDIGIT (c) || c == '.')
3378	  {
3379	    *q++ = c;
3380	    c = *p++;
3381	  }
3382	switch (c)
3383	  {
3384	  case '%':
3385	    putc ('%', file);
3386	    break;
3387
3388	  case 'd':  case 'i':  case 'u':
3389	  case 'x':  case 'X':  case 'o':
3390	  case 'c':
3391	    *q++ = c;
3392	    *q = 0;
3393	    fprintf (file, buf, va_arg (argptr, int));
3394	    break;
3395
3396	  case 'w':
3397	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3398	       'o' cases, but we do not check for those cases.  It
3399	       means that the value is a HOST_WIDE_INT, which may be
3400	       either `long' or `long long'.  */
3401	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3402	    q += strlen (HOST_WIDE_INT_PRINT);
3403	    *q++ = *p++;
3404	    *q = 0;
3405	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3406	    break;
3407
3408	  case 'l':
3409	    *q++ = c;
3410#ifdef HAVE_LONG_LONG
3411	    if (*p == 'l')
3412	      {
3413		*q++ = *p++;
3414		*q++ = *p++;
3415		*q = 0;
3416		fprintf (file, buf, va_arg (argptr, long long));
3417	      }
3418	    else
3419#endif
3420	      {
3421		*q++ = *p++;
3422		*q = 0;
3423		fprintf (file, buf, va_arg (argptr, long));
3424	      }
3425
3426	    break;
3427
3428	  case 's':
3429	    *q++ = c;
3430	    *q = 0;
3431	    fprintf (file, buf, va_arg (argptr, char *));
3432	    break;
3433
3434	  case 'O':
3435#ifdef ASM_OUTPUT_OPCODE
3436	    ASM_OUTPUT_OPCODE (asm_out_file, p);
3437#endif
3438	    break;
3439
3440	  case 'R':
3441#ifdef REGISTER_PREFIX
3442	    fprintf (file, "%s", REGISTER_PREFIX);
3443#endif
3444	    break;
3445
3446	  case 'I':
3447#ifdef IMMEDIATE_PREFIX
3448	    fprintf (file, "%s", IMMEDIATE_PREFIX);
3449#endif
3450	    break;
3451
3452	  case 'L':
3453#ifdef LOCAL_LABEL_PREFIX
3454	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3455#endif
3456	    break;
3457
3458	  case 'U':
3459	    fputs (user_label_prefix, file);
3460	    break;
3461
3462#ifdef ASM_FPRINTF_EXTENSIONS
3463	    /* Uppercase letters are reserved for general use by asm_fprintf
3464	       and so are not available to target specific code.  In order to
3465	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
3466	       they are defined here.  As they get turned into real extensions
3467	       to asm_fprintf they should be removed from this list.  */
3468	  case 'A': case 'B': case 'C': case 'D': case 'E':
3469	  case 'F': case 'G': case 'H': case 'J': case 'K':
3470	  case 'M': case 'N': case 'P': case 'Q': case 'S':
3471	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
3472	    break;
3473
3474	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3475#endif
3476	  default:
3477	    gcc_unreachable ();
3478	  }
3479	break;
3480
3481      default:
3482	putc (c, file);
3483      }
3484  va_end (argptr);
3485}
3486
3487/* Split up a CONST_DOUBLE or integer constant rtx
3488   into two rtx's for single words,
3489   storing in *FIRST the word that comes first in memory in the target
3490   and in *SECOND the other.  */
3491
3492void
3493split_double (rtx value, rtx *first, rtx *second)
3494{
3495  if (GET_CODE (value) == CONST_INT)
3496    {
3497      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3498	{
3499	  /* In this case the CONST_INT holds both target words.
3500	     Extract the bits from it into two word-sized pieces.
3501	     Sign extend each half to HOST_WIDE_INT.  */
3502	  unsigned HOST_WIDE_INT low, high;
3503	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3504
3505	  /* Set sign_bit to the most significant bit of a word.  */
3506	  sign_bit = 1;
3507	  sign_bit <<= BITS_PER_WORD - 1;
3508
3509	  /* Set mask so that all bits of the word are set.  We could
3510	     have used 1 << BITS_PER_WORD instead of basing the
3511	     calculation on sign_bit.  However, on machines where
3512	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3513	     compiler warning, even though the code would never be
3514	     executed.  */
3515	  mask = sign_bit << 1;
3516	  mask--;
3517
3518	  /* Set sign_extend as any remaining bits.  */
3519	  sign_extend = ~mask;
3520
3521	  /* Pick the lower word and sign-extend it.  */
3522	  low = INTVAL (value);
3523	  low &= mask;
3524	  if (low & sign_bit)
3525	    low |= sign_extend;
3526
3527	  /* Pick the higher word, shifted to the least significant
3528	     bits, and sign-extend it.  */
3529	  high = INTVAL (value);
3530	  high >>= BITS_PER_WORD - 1;
3531	  high >>= 1;
3532	  high &= mask;
3533	  if (high & sign_bit)
3534	    high |= sign_extend;
3535
3536	  /* Store the words in the target machine order.  */
3537	  if (WORDS_BIG_ENDIAN)
3538	    {
3539	      *first = GEN_INT (high);
3540	      *second = GEN_INT (low);
3541	    }
3542	  else
3543	    {
3544	      *first = GEN_INT (low);
3545	      *second = GEN_INT (high);
3546	    }
3547	}
3548      else
3549	{
3550	  /* The rule for using CONST_INT for a wider mode
3551	     is that we regard the value as signed.
3552	     So sign-extend it.  */
3553	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3554	  if (WORDS_BIG_ENDIAN)
3555	    {
3556	      *first = high;
3557	      *second = value;
3558	    }
3559	  else
3560	    {
3561	      *first = value;
3562	      *second = high;
3563	    }
3564	}
3565    }
3566  else if (GET_CODE (value) != CONST_DOUBLE)
3567    {
3568      if (WORDS_BIG_ENDIAN)
3569	{
3570	  *first = const0_rtx;
3571	  *second = value;
3572	}
3573      else
3574	{
3575	  *first = value;
3576	  *second = const0_rtx;
3577	}
3578    }
3579  else if (GET_MODE (value) == VOIDmode
3580	   /* This is the old way we did CONST_DOUBLE integers.  */
3581	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3582    {
3583      /* In an integer, the words are defined as most and least significant.
3584	 So order them by the target's convention.  */
3585      if (WORDS_BIG_ENDIAN)
3586	{
3587	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3588	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
3589	}
3590      else
3591	{
3592	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
3593	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3594	}
3595    }
3596  else
3597    {
3598      REAL_VALUE_TYPE r;
3599      long l[2];
3600      REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3601
3602      /* Note, this converts the REAL_VALUE_TYPE to the target's
3603	 format, splits up the floating point double and outputs
3604	 exactly 32 bits of it into each of l[0] and l[1] --
3605	 not necessarily BITS_PER_WORD bits.  */
3606      REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3607
3608      /* If 32 bits is an entire word for the target, but not for the host,
3609	 then sign-extend on the host so that the number will look the same
3610	 way on the host that it would on the target.  See for instance
3611	 simplify_unary_operation.  The #if is needed to avoid compiler
3612	 warnings.  */
3613
3614#if HOST_BITS_PER_LONG > 32
3615      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3616	{
3617	  if (l[0] & ((long) 1 << 31))
3618	    l[0] |= ((long) (-1) << 32);
3619	  if (l[1] & ((long) 1 << 31))
3620	    l[1] |= ((long) (-1) << 32);
3621	}
3622#endif
3623
3624      *first = GEN_INT (l[0]);
3625      *second = GEN_INT (l[1]);
3626    }
3627}
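/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT and a
   target with 32-bit words): for GEN_INT (0x100000002), the low word is 2
   and the high word is 1; with WORDS_BIG_ENDIAN, *FIRST is (const_int 1)
   and *SECOND is (const_int 2), and the other way around otherwise.
   A typical caller does

     rtx words[2];
     split_double (operands[1], &words[0], &words[1]);

   and then emits one word-sized move per half.  */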
3628
/* Return nonzero if this function has no function calls.  */

int
leaf_function_p (void)
{
  rtx insn;
  rtx link;

  if (current_function_profile || profile_arc_flag)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (CALL_P (insn)
	  && ! SIBLING_CALL_P (insn))
	return 0;
      if (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
	return 0;
    }
  for (link = current_function_epilogue_delay_list;
       link;
       link = XEXP (link, 1))
    {
      insn = XEXP (link, 0);

      if (CALL_P (insn)
	  && ! SIBLING_CALL_P (insn))
	return 0;
      if (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
	return 0;
    }

  return 1;
}

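/* Illustrative sketch, not part of this file: backend code might consult
   leaf_function_p when deciding how much prologue work is needed.  The
   identifiers save_return_address_slot and return_addr_rtx below are
   hypothetical placeholders.  */
#if 0
  /* Only spill the return address if the function can make a call that
     would clobber it.  */
  if (!leaf_function_p ())
    emit_move_insn (save_return_address_slot, return_addr_rtx);
#endif
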
/* Return 1 if INSN is a forward branch.
   Uses the uid_shuid array, so it works only in the final pass.  Output
   templates may use it, for example, to add branch prediction hints.  */
int
final_forward_branch_p (rtx insn)
{
  int insn_id, label_id;

  gcc_assert (uid_shuid);
  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* Both the branch and its target label must have id information
     available.  */
  gcc_assert (insn_id && label_id);
  return insn_id < label_id;
}

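/* Illustrative sketch, not part of this file: an output template for a
   conditional branch might pick a static prediction hint based on the
   branch direction.  The mnemonics and hint suffixes below are
   hypothetical.  */
#if 0
  /* Predict forward branches not taken, backward branches taken.  */
  return final_forward_branch_p (insn) ? "beq,pn\t%l0" : "beq,pt\t%l0";
#endif
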
/* On some machines, a function with no call insns
   can run faster if it doesn't create its own register window.
   When output, the leaf function should use only the "output"
   registers.  Ordinarily, the function would be compiled to use
   the "input" registers to find its arguments; it is a candidate
   for leaf treatment if it uses only the "input" registers.
   Leaf function treatment means renumbering so the function
   uses the "output" registers instead.  */

#ifdef LEAF_REGISTERS

/* Return 1 if this function uses only the registers that can be
   safely renumbered.  */

int
only_leaf_regs_used (void)
{
  int i;
  const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((regs_ever_live[i] || global_regs[i])
	&& ! permitted_reg_in_leaf_functions[i])
      return 0;

  if (current_function_uses_pic_offset_table
      && pic_offset_table_rtx != 0
      && REG_P (pic_offset_table_rtx)
      && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
    return 0;

  return 1;
}

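/* Illustrative sketch, not part of this file: a target that supports leaf
   register renumbering defines something like the following in its headers
   (used by only_leaf_regs_used above and leaf_renumber_regs_insn below).
   The register layout and the leaf_reg_remap array are hypothetical;
   LEAF_REG_REMAP must yield a negative value for a register that cannot be
   remapped, which the assertion in leaf_renumber_regs_insn checks.  */
#if 0
/* Nonzero for each hard register a leaf function may use without
   renumbering.  */
#define LEAF_REGISTERS \
  { 1, 1, 1, 1, 1, 1, 1, 1,	/* "output" registers */	\
    0, 0, 0, 0, 0, 0, 0, 0 }	/* "input" registers */

/* Map an "input" register onto its "output" equivalent, or -1.  */
#define LEAF_REG_REMAP(REGNO) (leaf_reg_remap[REGNO])
#endif
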
/* Scan all instructions and renumber all registers into those
   available in leaf functions.  */

static void
leaf_renumber_regs (rtx first)
{
  rtx insn;

  /* Renumber only the actual patterns.
     The reg-notes can contain frame pointer refs,
     and renumbering them could crash, and should not be needed.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      leaf_renumber_regs_insn (PATTERN (insn));
  for (insn = current_function_epilogue_delay_list;
       insn;
       insn = XEXP (insn, 1))
    if (INSN_P (XEXP (insn, 0)))
      leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
}

/* Scan IN_RTX and its subexpressions, and renumber all regs into those
   available in leaf functions.  */

void
leaf_renumber_regs_insn (rtx in_rtx)
{
  int i, j;
  const char *format_ptr;

  if (in_rtx == 0)
    return;

  /* Renumber all input-registers into output-registers.  */

  if (REG_P (in_rtx))
    {
      int newreg;

      /* Don't renumber the same reg twice.  */
      if (in_rtx->used)
	return;

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
	 to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)
	{
	  in_rtx->used = 1;
	  return;
	}
      newreg = LEAF_REG_REMAP (newreg);
      gcc_assert (newreg >= 0);
      regs_ever_live[REGNO (in_rtx)] = 0;
      regs_ever_live[newreg] = 1;
      REGNO (in_rtx) = newreg;
      in_rtx->used = 1;
    }

  if (INSN_P (in_rtx))
    {
      /* Inside a SEQUENCE, we find insns.
	 Renumber just the patterns of these insns,
	 just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));
      return;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	leaf_renumber_regs_insn (XEXP (in_rtx, i));
	break;

      case 'E':
	if (NULL != XVEC (in_rtx, i))
	  {
	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
	  }
	break;

      case 'S':
      case 's':
      case '0':
      case 'i':
      case 'w':
      case 'n':
      case 'u':
	break;

      default:
	gcc_unreachable ();
      }
}
#endif


/* When -gused is used, emit debug info for only used symbols.  But in
   addition to the standard intercepted debug_hooks there are some direct
   calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
   Those routines may also be called from a higher level intercepted routine.
   So to prevent recording data for an inner call to one of these for an
   intercept, we maintain an intercept nesting counter (debug_nesting).  We
   only save the intercepted arguments if the nesting is 1.  */
int debug_nesting = 0;

static tree *symbol_queue;
int symbol_queue_index = 0;
static int symbol_queue_size = 0;

/* Generate the symbols for any queued up type symbols we encountered
   while generating the type info for some originally used symbol.
   This might generate additional entries in the queue.  Only when
   the nesting depth goes to 0 is this routine called.  */

void
debug_flush_symbol_queue (void)
{
  int i;

  /* Make sure that additionally queued items are not flushed
     prematurely.  */

  ++debug_nesting;

  for (i = 0; i < symbol_queue_index; ++i)
    {
      /* If we pushed queued symbols then such symbols must be
         output no matter what anyone else says.  Specifically,
         we need to make sure dbxout_symbol() thinks the symbol was
         used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
         which may be set for outside reasons.  */
      int saved_tree_used = TREE_USED (symbol_queue[i]);
      int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
      TREE_USED (symbol_queue[i]) = 1;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;

#ifdef DBX_DEBUGGING_INFO
      dbxout_symbol (symbol_queue[i], 0);
#endif

      TREE_USED (symbol_queue[i]) = saved_tree_used;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
    }

  symbol_queue_index = 0;
  --debug_nesting;
}

/* Queue a type symbol needed as part of the definition of a decl
   symbol.  These symbols are generated when debug_flush_symbol_queue()
   is called.  */

void
debug_queue_symbol (tree decl)
{
  if (symbol_queue_index >= symbol_queue_size)
    {
      symbol_queue_size += 10;
      symbol_queue = xrealloc (symbol_queue,
			       symbol_queue_size * sizeof (tree));
    }

  symbol_queue[symbol_queue_index++] = decl;
}

/* Free symbol queue.  */
void
debug_free_queue (void)
{
  if (symbol_queue)
    {
      free (symbol_queue);
      symbol_queue = NULL;
      symbol_queue_size = 0;
    }
}

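/* Illustrative sketch, not part of this file: a debug writer queues the
   type symbols a used declaration depends on and flushes them once the
   outermost intercepted call unwinds, roughly like this.  SOME_DECL is a
   hypothetical placeholder.  */
#if 0
  debug_queue_symbol (TYPE_NAME (TREE_TYPE (SOME_DECL)));
  /* ... more symbols may be queued while debug info is emitted ...  */
  if (--debug_nesting == 0 && symbol_queue_index > 0)
    debug_flush_symbol_queue ();
#endif
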
/* Turn the RTL into assembly.  */
static unsigned int
rest_of_handle_final (void)
{
  rtx x;
  const char *fnname;

  /* Get the function's name, as described by its RTL.  This may be
     different from the DECL_NAME name used in the source file.  */

  x = DECL_RTL (current_function_decl);
  gcc_assert (MEM_P (x));
  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == SYMBOL_REF);
  fnname = XSTR (x, 0);

  assemble_start_function (current_function_decl, fnname);
  final_start_function (get_insns (), asm_out_file, optimize);
  final (get_insns (), asm_out_file, optimize);
  final_end_function ();

#ifdef TARGET_UNWIND_INFO
  /* ??? The IA-64 ".handlerdata" directive must be issued before
     the ".endp" directive that closes the procedure descriptor.  */
  output_function_exception_table ();
#endif

  assemble_end_function (current_function_decl, fnname);

#ifndef TARGET_UNWIND_INFO
  /* Otherwise, it feels unclean to switch sections in the middle.  */
  output_function_exception_table ();
#endif

  user_defined_section_attribute = false;

  if (! quiet_flag)
    fflush (asm_out_file);

  /* Release all memory allocated by flow.  */
  free_basic_block_vars ();

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  (*debug_hooks->function_decl) (current_function_decl);
  timevar_pop (TV_SYMOUT);
  return 0;
}

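/* For reference (modes omitted for brevity): for a function such as
   `int foo (void)', DECL_RTL is the MEM checked by the assertions in
   rest_of_handle_final above, typically of the form
       (mem (symbol_ref ("foo")))
   so FNNAME ends up as the assembler name "foo", possibly decorated with a
   target-specific prefix.  */
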
struct tree_opt_pass pass_final =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_final,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect,                     /* todo_flags_finish */
  0                                     /* letter */
};


static unsigned int
rest_of_handle_shorten_branches (void)
{
  /* Shorten branches.  */
  shorten_branches (get_insns ());
  return 0;
}

struct tree_opt_pass pass_shorten_branches =
{
  "shorten",                            /* name */
  NULL,                                 /* gate */
  rest_of_handle_shorten_branches,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};


static unsigned int
rest_of_clean_state (void)
{
  rtx insn, next;

  /* It is very important to decompose the RTL instruction chain here:
     debug information keeps pointing into CODE_LABEL insns inside the
     function body.  If these remain pointing to the other insns, we end up
     preserving the whole RTL chain and attached detailed debug info in
     memory.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      NEXT_INSN (insn) = NULL;
      PREV_INSN (insn) = NULL;
    }

  /* In case the function was not output,
     don't leave any temporary anonymous types
     queued up for sdb output.  */
#ifdef SDB_DEBUGGING_INFO
  if (write_symbols == SDB_DEBUG)
    sdbout_types (NULL_TREE);
#endif

  reload_completed = 0;
  epilogue_completed = 0;
  flow2_completed = 0;
  no_new_pseudos = 0;
#ifdef STACK_REGS
  regstack_completed = 0;
#endif

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_basic_block_vars ();
  free_bb_for_insn ();

  if (targetm.binds_local_p (current_function_decl))
    {
      int pref = cfun->preferred_stack_boundary;
      if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
        pref = cfun->stack_alignment_needed;
      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
        = pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state,
     and the function context push/pop code does not save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);
  return 0;
}

struct tree_opt_pass pass_clean_state =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_clean_state,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_rtl,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
