/* Convert RTL to assembler code and output it, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This is the final pass of the compiler.
   It looks at the rtl code for a function and outputs assembler code.

   Call `final_start_function' to output the assembler code for function entry,
   `final' to output assembler code for some RTL code,
   `final_end_function' to output assembler code for function exit.
   If a function is compiled in several pieces, each piece is
   output separately with `final'.

   Some optimizations are also done at this level.
   Move instructions that were made unnecessary by good register allocation
   are detected and omitted from the output.  (Though most of these
   are removed by the last jump pass.)

   Instructions to set the condition codes are omitted when it can be
   seen that the condition codes already had the desired values.

   In some cases it is sufficient if the inherited condition codes
   have related values, but this may require the following insn
   (the one that tests the condition codes) to be modified.

   The code for the function prologue and epilogue is generated
   directly in assembler by the target functions function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "conditions.h"
#include "flags.h"
#include "real.h"
#include "hard-reg-set.h"
#include "output.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "reload.h"
#include "intl.h"
#include "basic-block.h"
#include "target.h"
#include "debug.h"
#include "expr.h"
#include "cfglayout.h"
#include "tree-pass.h"
#include "timevar.h"
#include "cgraph.h"
#include "coverage.h"

#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h"		/* Needed for external data
				   declarations for e.g. AIX 4.x.  */
#endif

#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
#include "dwarf2out.h"
#endif

#ifdef DBX_DEBUGGING_INFO
#include "dbxout.h"
#endif

#ifdef SDB_DEBUGGING_INFO
#include "sdbout.h"
#endif

/* If we aren't using cc0, CC_STATUS_INIT shouldn't exist.  So define a
   null default for it to save conditionalization later.  */
#ifndef CC_STATUS_INIT
#define CC_STATUS_INIT
#endif

/* How to start an assembler comment.  */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif

/* Is the given character a logical line separator for the assembler?  */
#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
#endif

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

/* Bitflags used by final_scan_insn.  */
#define SEEN_BB		1
#define SEEN_NOTE	2
#define SEEN_EMITTED	4

/* Last insn processed by final_scan_insn.  */
static rtx debug_insn;
rtx current_output_insn;

/* Line number of last NOTE.  */
static int last_linenum;

/* Highest line number in current block.  */
static int high_block_linenum;

/* Likewise for function.  */
static int high_function_linenum;

/* Filename of last NOTE.  */
static const char *last_filename;

/* Whether to force emission of a line note before the next insn.  */
static bool force_source_line = false;

extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */

/* Nonzero while outputting an `asm' with operands.
   This means that inconsistencies are the user's fault, so don't die.
   The precise value is the insn being output, to pass to error_for_asm.  */
rtx this_is_asm_operands;

/* Number of operands of this insn, for an `asm' with operands.  */
static unsigned int insn_noperands;

/* Compare optimization flag.  */

static rtx last_ignored_compare = 0;

/* Assign a unique number to each insn that is output.
   This can be used to generate unique local labels.  */

static int insn_counter = 0;

#ifdef HAVE_cc0
/* This variable contains machine-dependent flags (defined in tm.h)
   set and examined by output routines
   that describe how to interpret the condition codes properly.  */

CC_STATUS cc_status;

/* During output of an insn, this contains a copy of cc_status
   from before the insn.  */

CC_STATUS cc_prev_status;
#endif

/* Indexed by hardware reg number, is 1 if that register is ever
   used in the current function.

   In life_analysis, or in stupid_life_analysis, this is set
   up to record the hard regs used explicitly.  Reload adds
   in the hard regs used for holding pseudo regs.  Final uses
   it to generate the code in the function prologue and epilogue
   to save and restore registers as needed.  */

char regs_ever_live[FIRST_PSEUDO_REGISTER];

/* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
   Unlike regs_ever_live, elements of this array corresponding to
   eliminable regs like the frame pointer are set if an asm sets them.  */

char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];

/* Nonzero means current function must be given a frame pointer.
   Initialized in function.c to 0.  Set only in reload1.c as per
   the needs of the function.  */

int frame_pointer_needed;

/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */

static int block_depth;

/* Nonzero if we have enabled APP processing of our assembler output.  */

static int app_on;

/* If we are outputting an insn sequence, this contains the sequence rtx.
   Zero otherwise.  */

rtx final_sequence;

#ifdef ASSEMBLER_DIALECT

/* Number of the assembler dialect to use, starting at 0.  */
static int dialect_number;
#endif

#ifdef HAVE_conditional_execution
/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
rtx current_insn_predicate;
#endif

#ifdef HAVE_ATTR_length
static int asm_insn_count (rtx);
#endif
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
static bool notice_source_line (rtx);
static rtx walk_alter_subreg (rtx *);
static void output_asm_name (void);
static void output_alternate_entry_point (FILE *, rtx);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
static void output_operand (rtx, int);
#ifdef LEAF_REGISTERS
static void leaf_renumber_regs (rtx);
#endif
#ifdef HAVE_cc0
static int alter_cond (rtx);
#endif
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx);
#endif
#ifdef HAVE_ATTR_length
static int align_fuzz (rtx, rtx, int, unsigned);
#endif

/* Initialize data in final at the beginning of a compilation.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
  app_on = 0;
  final_sequence = 0;

#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}

/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */
void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
}

/* Default target hook that outputs nothing to a stream.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}

/* Enable APP processing of subsequent output.
   Used before the output from an `asm' statement.  */

void
app_enable (void)
{
  if (! app_on)
    {
      fputs (ASM_APP_ON, asm_out_file);
      app_on = 1;
    }
}

/* Disable APP processing of subsequent output.
   Called from varasm.c before most kinds of output.  */

void
app_disable (void)
{
  if (app_on)
    {
      fputs (ASM_APP_OFF, asm_out_file);
      app_on = 0;
    }
}

/* Return the number of slots filled in the current
   delayed branch sequence (we don't count the insn needing the
   delay slot).   Zero if not in a delayed branch sequence.  */

#ifdef DELAY_SLOTS
int
dbr_sequence_length (void)
{
  if (final_sequence != 0)
    return XVECLEN (final_sequence, 0) - 1;
  else
    return 0;
}
#endif

/* The next two pages contain routines used to compute the length of an insn
   and to shorten branches.  */

/* Arrays for insn lengths, and addresses.  The latter is referenced by
   `insn_current_length'.  */

static int *insn_lengths;

varray_type insn_addresses_;

/* Max uid for which the above arrays are valid.  */
static int insn_lengths_max_uid;

/* Address of insn being processed.  Used by `insn_current_length'.  */
int insn_current_address;

/* Address of insn being processed in previous iteration.  */
int insn_last_address;

/* known invariant alignment of insn being processed.  */
int insn_current_align;

/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
   gives the next following alignment insn that increases the known
   alignment, or NULL_RTX if there is no such insn.
   For any alignment obtained this way, we can again index uid_align with
   its uid to obtain the next following align that in turn increases the
   alignment, till we reach NULL_RTX; the sequence obtained this way
   for each insn we'll call the alignment chain of this insn in the following
   comments.  */
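/* A purely illustrative example (the labels and alignments here are
   hypothetical, not taken from any target): if insn I is followed by
   label L1 with a known 2-byte (2^1) alignment and later by label L2
   with a known 16-byte (2^4) alignment, and no stronger alignment
   follows, the alignment chain of I is I -> L1 -> L2 -> NULL_RTX:
   uid_align[INSN_UID (I)] == L1, uid_align[INSN_UID (L1)] == L2, and
   uid_align[INSN_UID (L2)] == NULL_RTX.  */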

struct label_alignment
{
  short alignment;
  short max_skip;
};

static rtx *uid_align;
static int *uid_shuid;
static struct label_alignment *label_align;

/* Indicate that branch shortening hasn't yet been done.  */

void
init_insn_lengths (void)
{
  if (uid_shuid)
    {
      free (uid_shuid);
      uid_shuid = 0;
    }
  if (insn_lengths)
    {
      free (insn_lengths);
      insn_lengths = 0;
      insn_lengths_max_uid = 0;
    }
#ifdef HAVE_ATTR_length
  INSN_ADDRESSES_FREE ();
#endif
  if (uid_align)
    {
      free (uid_align);
      uid_align = 0;
    }
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */
static inline int
get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
		   int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
{
#ifdef HAVE_ATTR_length
  rtx body;
  int i;
  int length = 0;

  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];
  else
    switch (GET_CODE (insn))
      {
      case NOTE:
      case BARRIER:
      case CODE_LABEL:
	return 0;

      case CALL_INSN:
	length = fallback_fn (insn);
	break;

      case JUMP_INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	  {
	    /* Alignment is machine-dependent and should be handled by
	       ADDR_VEC_ALIGN.  */
	  }
	else
	  length = fallback_fn (insn);
	break;

      case INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	  return 0;

	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  length = asm_insn_count (body) * fallback_fn (insn);
	else if (GET_CODE (body) == SEQUENCE)
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    length += get_attr_length (XVECEXP (body, 0, i));
	else
	  length = fallback_fn (insn);
	break;

      default:
	break;
      }

#ifdef ADJUST_INSN_LENGTH
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
#else /* not HAVE_ATTR_length */
  return 0;
#define insn_default_length 0
#define insn_min_length 0
#endif /* not HAVE_ATTR_length */
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length.  */
int
get_attr_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length.  */
int
get_attr_min_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}

/* Code to handle alignment inside shorten_branches.  */

/* Here is an explanation of how the algorithm in align_fuzz can give
   proper results:

   Call a sequence of instructions beginning with alignment point X
   and continuing until the next alignment point `block X'.  When `X'
   is used in an expression, it means the alignment value of the
   alignment point.

   Call the distance between the start of the first insn of block X, and
   the end of the last insn of block X `IX', for the `inner size of X'.
   This is clearly the sum of the instruction lengths.

   Likewise with the next alignment-delimited block following X, which we
   shall call block Y.

   Call the distance between the start of the first insn of block X, and
   the start of the first insn of block Y `OX', for the `outer size of X'.

   The estimated padding is then OX - IX.

   OX can be safely estimated as

           if (X >= Y)
                   OX = round_up(IX, Y)
           else
                   OX = round_up(IX, X) + Y - X

   Clearly est(IX) >= real(IX), because that only depends on the
   instruction lengths, and those being overestimated is a given.

   Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
   we needn't worry about that when thinking about OX.

   When X >= Y, the alignment provided by Y adds no uncertainty factor
   for branch ranges starting before X, so we can just round what we have.
   But when X < Y, we don't know anything about the, so to speak,
   `middle bits', so we have to assume the worst when aligning up from an
   address mod X to one mod Y, which is Y - X.  */
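/* A small worked instance of the estimate above, with purely illustrative
   numbers (they do not come from any particular target): suppose block X
   starts at an alignment of X = 4 bytes, the next alignment point requires
   Y = 8 bytes, and the summed instruction lengths give IX = 10.  Since
   X < Y, OX = round_up (10, 4) + 8 - 4 = 12 + 4 = 16, so the estimated
   padding OX - IX is 6 bytes, which covers the worst case no matter which
   address mod 8 the block actually starts on.  */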

#ifndef LABEL_ALIGN
#define LABEL_ALIGN(LABEL) align_labels_log
#endif

#ifndef LABEL_ALIGN_MAX_SKIP
#define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
#endif

#ifndef LOOP_ALIGN
#define LOOP_ALIGN(LABEL) align_loops_log
#endif

#ifndef LOOP_ALIGN_MAX_SKIP
#define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER
#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
#endif

#ifndef JUMP_ALIGN
#define JUMP_ALIGN(LABEL) align_jumps_log
#endif

#ifndef JUMP_ALIGN_MAX_SKIP
#define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
#endif

#ifndef ADDR_VEC_ALIGN
static int
final_addr_vec_align (rtx addr_vec)
{
  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));

  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);

}

#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
#endif

#ifndef INSN_LENGTH_ALIGNMENT
#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
#endif

#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])

static int min_labelno, max_labelno;

#define LABEL_TO_ALIGNMENT(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)

#define LABEL_TO_MAX_SKIP(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)

/* For the benefit of port specific code do this also as a function.  */

int
label_to_alignment (rtx label)
{
  return LABEL_TO_ALIGNMENT (label);
}

#ifdef HAVE_ATTR_length
/* The differences in addresses
   between a branch and its target might grow or shrink depending on
   the alignment the start insn of the range (the branch for a forward
   branch or the label for a backward branch) starts out on; if these
   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
   The function align_fuzz calculates the amount we have to add to the
   naively computed difference, by traversing the part of the alignment
   chain of the start insn of the range that is in front of the end insn
   of the range, and considering for each alignment the maximum amount
   that it might contribute to a size increase.

   For casesi tables, we also want to know worst case minimum amounts of
   address difference, in case a machine description wants to introduce
   some common offset that is added to all offsets in a table.
   For this purpose, align_fuzz with a growth argument of 0 computes the
   appropriate adjustment.  */

/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */

static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      if (new_align < known_align)
	continue;
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}

/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */
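/* A purely illustrative example (the addresses are made up): for a
   forward branch whose end address was 100 in the previous pass, if
   align_fuzz reports that alignment between the branch and its target
   could add up to 6 bytes, the reference address used is 100 - 6 = 94;
   the distance to the target is thus overestimated rather than
   underestimated.  */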

int
insn_current_reference_address (rtx branch)
{
  rtx dest, seq;
  int seq_uid;

  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (!JUMP_P (branch))
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
#endif /* HAVE_ATTR_length */

/* Compute branch alignments based on frequency information in the
   CFG.  */

static unsigned int
compute_alignments (void)
{
  int log, max_skip, max_log;
  basic_block bb;

  if (label_align)
    {
      free (label_align);
      label_align = 0;
    }

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_size)
    return 0;

  FOR_EACH_BB (bb)
    {
      rtx label = BB_HEAD (bb);
      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
      edge e;
      edge_iterator ei;

      if (!LABEL_P (label)
	  || probably_never_executed_bb_p (bb))
	continue;
      max_log = LABEL_ALIGN (label);
      max_skip = LABEL_ALIGN_MAX_SKIP;

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
	  else
	    branch_frequency += EDGE_FREQUENCY (e);
	}

      /* There are two purposes for aligning a block with no incoming fallthru edge:
	 1) to avoid fetch stalls when the branch destination is near a cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and whose predecessor is likely not to be executed
	 when the function is called.  */

      if (!has_fallthru
	  && (branch_frequency > BB_FREQ_MAX / 10
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = JUMP_ALIGN_MAX_SKIP;
	    }
	}
      /* In case the block is frequent and reached mostly by a non-fallthru edge,
	 align it.  It is most likely the first block of a loop.  */
      if (has_fallthru
	  && maybe_hot_bb_p (bb)
	  && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
	  && branch_frequency > fallthru_frequency * 2)
	{
	  log = LOOP_ALIGN (label);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LOOP_ALIGN_MAX_SKIP;
	    }
	}
      LABEL_TO_ALIGNMENT (label) = max_log;
      LABEL_TO_MAX_SKIP (label) = max_skip;
    }
  return 0;
}

struct tree_opt_pass pass_compute_alignments =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  compute_alignments,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};


/* Make a pass over all insns and compute their actual lengths by shortening
   any branches of variable length if possible.  */

/* shorten_branches might be called multiple times:  for example, the SH
   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
   In order to do this, it needs proper length information, which it obtains
   by calling shorten_branches.  This cannot be collapsed with
   shorten_branches itself into a single pass unless we also want to integrate
   reorg.c, since the branch splitting exposes new instructions with delay
   slots.  */

void
shorten_branches (rtx first ATTRIBUTE_UNUSED)
{
  rtx insn;
  int max_uid;
  int i;
  int max_log;
  int max_skip;
#ifdef HAVE_ATTR_length
#define MAX_CODE_ALIGN 16
  rtx seq;
  int something_changed = 1;
  char *varying_length;
  rtx body;
  int uid;
  rtx align_tab[MAX_CODE_ALIGN];

#endif

  /* Compute maximum UID and allocate label_align / uid_shuid.  */
  max_uid = get_max_uid ();

  /* Free uid_shuid before reallocating it.  */
  free (uid_shuid);

  uid_shuid = XNEWVEC (int, max_uid);

  if (max_labelno != max_label_num ())
    {
      int old = max_labelno;
      int n_labels;
      int n_old_labels;

      max_labelno = max_label_num ();

      n_labels = max_labelno - min_labelno + 1;
      n_old_labels = old - min_labelno + 1;

      label_align = xrealloc (label_align,
			      n_labels * sizeof (struct label_alignment));

      /* The range of labels grows monotonically in the function.  Failing here
         means that the initialization of the array got lost.  */
      gcc_assert (n_old_labels <= n_labels);

      memset (label_align + n_old_labels, 0,
	      (n_labels - n_old_labels) * sizeof (struct label_alignment));
    }

  /* Initialize label_align and set up uid_shuid to be strictly
     monotonically rising with insn order.  */
  /* We use max_log here to keep track of the maximum alignment we want to
     impose on the next CODE_LABEL (or the current one if we are processing
     the CODE_LABEL itself).  */

  max_log = 0;
  max_skip = 0;

  for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
    {
      int log;

      INSN_SHUID (insn) = i++;
      if (INSN_P (insn))
	continue;

      if (LABEL_P (insn))
	{
	  rtx next;

	  /* Merge in alignments computed by compute_alignments.  */
	  log = LABEL_TO_ALIGNMENT (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_TO_MAX_SKIP (insn);
	    }

	  log = LABEL_ALIGN (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_ALIGN_MAX_SKIP;
	    }
	  next = next_nonnote_insn (insn);
	  /* ADDR_VECs only take room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    if (next && JUMP_P (next))
	      {
		rtx nextbody = PATTERN (next);
		if (GET_CODE (nextbody) == ADDR_VEC
		    || GET_CODE (nextbody) == ADDR_DIFF_VEC)
		  {
		    log = ADDR_VEC_ALIGN (next);
		    if (max_log < log)
		      {
			max_log = log;
			max_skip = LABEL_ALIGN_MAX_SKIP;
		      }
		  }
	      }
	  LABEL_TO_ALIGNMENT (insn) = max_log;
	  LABEL_TO_MAX_SKIP (insn) = max_skip;
	  max_log = 0;
	  max_skip = 0;
	}
      else if (BARRIER_P (insn))
	{
	  rtx label;

	  for (label = insn; label && ! INSN_P (label);
	       label = NEXT_INSN (label))
	    if (LABEL_P (label))
	      {
		log = LABEL_ALIGN_AFTER_BARRIER (insn);
		if (max_log < log)
		  {
		    max_log = log;
		    max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
		  }
		break;
	      }
	}
    }
#ifdef HAVE_ATTR_length

  /* Allocate the rest of the arrays.  */
  insn_lengths = XNEWVEC (int, max_uid);
  insn_lengths_max_uid = max_uid;
  /* Syntax errors can lead to labels being outside of the main insn stream.
     Initialize insn_addresses, so that we get reproducible results.  */
  INSN_ADDRESSES_ALLOC (max_uid);

  varying_length = XCNEWVEC (char, max_uid);

  /* Initialize uid_align.  We scan instructions
     from end to start, and keep in align_tab[n] the last seen insn
     that does an alignment of at least n+1, i.e. the successor
     in the alignment chain for an insn that does / has a known
     alignment of n.  */
  uid_align = XCNEWVEC (rtx, max_uid);

  for (i = MAX_CODE_ALIGN; --i >= 0;)
    align_tab[i] = NULL_RTX;
  seq = get_last_insn ();
  for (; seq; seq = PREV_INSN (seq))
    {
      int uid = INSN_UID (seq);
      int log;
      log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
      uid_align[uid] = align_tab[0];
      if (log)
	{
	  /* Found an alignment label.  */
	  uid_align[uid] = align_tab[log];
	  for (i = log - 1; i >= 0; i--)
	    align_tab[i] = seq;
	}
    }
#ifdef CASE_VECTOR_SHORTEN_MODE
  if (optimize)
    {
      /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
         label fields.  */

      int min_shuid = INSN_SHUID (get_insns ()) - 1;
      int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
      int rel;

      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
	{
	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
	  int len, i, min, max, insn_shuid;
	  int min_align;
	  addr_diff_vec_flags flags;

	  if (!JUMP_P (insn)
	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	    continue;
	  pat = PATTERN (insn);
	  len = XVECLEN (pat, 1);
	  gcc_assert (len > 0);
	  min_align = MAX_CODE_ALIGN;
	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
	    {
	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
	      int shuid = INSN_SHUID (lab);
	      if (shuid < min)
		{
		  min = shuid;
		  min_lab = lab;
		}
	      if (shuid > max)
		{
		  max = shuid;
		  max_lab = lab;
		}
	      if (min_align > LABEL_TO_ALIGNMENT (lab))
		min_align = LABEL_TO_ALIGNMENT (lab);
	    }
	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
	  insn_shuid = INSN_SHUID (insn);
	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
	  memset (&flags, 0, sizeof (flags));
	  flags.min_align = min_align;
	  flags.base_after_vec = rel > insn_shuid;
	  flags.min_after_vec  = min > insn_shuid;
	  flags.max_after_vec  = max > insn_shuid;
	  flags.min_after_base = min > rel;
	  flags.max_after_base = max > rel;
	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
	}
    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

  /* Compute initial lengths, addresses, and varying flags for each insn.  */
  for (insn_current_address = 0, insn = first;
       insn != 0;
       insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
    {
      uid = INSN_UID (insn);

      insn_lengths[uid] = 0;

      if (LABEL_P (insn))
	{
	  int log = LABEL_TO_ALIGNMENT (insn);
	  if (log)
	    {
	      int align = 1 << log;
	      int new_address = (insn_current_address + align - 1) & -align;
	      insn_lengths[uid] = new_address - insn_current_address;
	    }
	}

      INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];

      if (NOTE_P (insn) || BARRIER_P (insn)
	  || LABEL_P (insn))
	continue;
      if (INSN_DELETED_P (insn))
	continue;

      body = PATTERN (insn);
      if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	{
	  /* This only takes room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    insn_lengths[uid] = (XVECLEN (body,
					  GET_CODE (body) == ADDR_DIFF_VEC)
				 * GET_MODE_SIZE (GET_MODE (body)));
	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
	}
      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
      else if (GET_CODE (body) == SEQUENCE)
	{
	  int i;
	  int const_delay_slots;
#ifdef DELAY_SLOTS
	  const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
#else
	  const_delay_slots = 0;
#endif
	  /* Inside a delay slot sequence, we do not do any branch shortening
	     if the shortening could change the number of delay slots
	     of the branch.  */
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    {
	      rtx inner_insn = XVECEXP (body, 0, i);
	      int inner_uid = INSN_UID (inner_insn);
	      int inner_length;

	      if (GET_CODE (body) == ASM_INPUT
		  || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
		inner_length = (asm_insn_count (PATTERN (inner_insn))
				* insn_default_length (inner_insn));
	      else
		inner_length = insn_default_length (inner_insn);

	      insn_lengths[inner_uid] = inner_length;
	      if (const_delay_slots)
		{
		  if ((varying_length[inner_uid]
		       = insn_variable_length_p (inner_insn)) != 0)
		    varying_length[uid] = 1;
		  INSN_ADDRESSES (inner_uid) = (insn_current_address
						+ insn_lengths[uid]);
		}
	      else
		varying_length[inner_uid] = 0;
	      insn_lengths[uid] += inner_length;
	    }
	}
      else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
	{
	  insn_lengths[uid] = insn_default_length (insn);
	  varying_length[uid] = insn_variable_length_p (insn);
	}

      /* If needed, do any adjustment.  */
#ifdef ADJUST_INSN_LENGTH
      ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
      if (insn_lengths[uid] < 0)
	fatal_insn ("negative insn length", insn);
#endif
    }

  /* Now loop over all the insns finding varying length insns.  For each,
     get the current insn length.  If it has changed, reflect the change.
     When nothing changes for a full pass, we are done.  */

  while (something_changed)
    {
      something_changed = 0;
      insn_current_align = MAX_CODE_ALIGN - 1;
      for (insn_current_address = 0, insn = first;
	   insn != 0;
	   insn = NEXT_INSN (insn))
	{
	  int new_length;
#ifdef ADJUST_INSN_LENGTH
	  int tmp_length;
#endif
	  int length_align;

	  uid = INSN_UID (insn);

	  if (LABEL_P (insn))
	    {
	      int log = LABEL_TO_ALIGNMENT (insn);
	      if (log > insn_current_align)
		{
		  int align = 1 << log;
		  int new_address= (insn_current_address + align - 1) & -align;
		  insn_lengths[uid] = new_address - insn_current_address;
		  insn_current_align = log;
		  insn_current_address = new_address;
		}
	      else
		insn_lengths[uid] = 0;
	      INSN_ADDRESSES (uid) = insn_current_address;
	      continue;
	    }

	  length_align = INSN_LENGTH_ALIGNMENT (insn);
	  if (length_align < insn_current_align)
	    insn_current_align = length_align;

	  insn_last_address = INSN_ADDRESSES (uid);
	  INSN_ADDRESSES (uid) = insn_current_address;

#ifdef CASE_VECTOR_SHORTEN_MODE
	  if (optimize && JUMP_P (insn)
	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      rtx body = PATTERN (insn);
	      int old_length = insn_lengths[uid];
	      rtx rel_lab = XEXP (XEXP (body, 0), 0);
	      rtx min_lab = XEXP (XEXP (body, 2), 0);
	      rtx max_lab = XEXP (XEXP (body, 3), 0);
	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
	      rtx prev;
	      int rel_align = 0;
	      addr_diff_vec_flags flags;

	      /* Avoid automatic aggregate initialization.  */
	      flags = ADDR_DIFF_VEC_FLAGS (body);

	      /* Try to find a known alignment for rel_lab.  */
	      for (prev = rel_lab;
		   prev
		   && ! insn_lengths[INSN_UID (prev)]
		   && ! (varying_length[INSN_UID (prev)] & 1);
		   prev = PREV_INSN (prev))
		if (varying_length[INSN_UID (prev)] & 2)
		  {
		    rel_align = LABEL_TO_ALIGNMENT (prev);
		    break;
		  }

	      /* See the comment on addr_diff_vec_flags in rtl.h for the
		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
	      /* Anything after INSN still has addresses from the last
		 pass; adjust these so that they reflect our current
		 estimate for this pass.  */
	      if (flags.base_after_vec)
		rel_addr += insn_current_address - insn_last_address;
	      if (flags.min_after_vec)
		min_addr += insn_current_address - insn_last_address;
	      if (flags.max_after_vec)
		max_addr += insn_current_address - insn_last_address;
	      /* We want to know the worst case, i.e. lowest possible value
		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
		 its offset is positive, and we have to be wary of code shrink;
		 otherwise, it is negative, and we have to be wary of code
		 size increase.  */
	      if (flags.min_after_base)
		{
		  /* If INSN is between REL_LAB and MIN_LAB, the size
		     changes we are about to make can change the alignment
		     within the observed offset, therefore we have to break
		     it up into two parts that are independent.  */
		  if (! flags.base_after_vec && flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
		    }
		  else
		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
		    }
		  else
		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
		}
	      /* Likewise, determine the worst case, i.e. highest possible value
		 for the offset of MAX_LAB.  */
	      if (flags.max_after_base)
		{
		  if (! flags.base_after_vec && flags.max_after_vec)
		    {
		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
		    }
		  else
		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.max_after_vec)
		    {
		      max_addr += align_fuzz (max_lab, insn, 0, 0);
		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
		    }
		  else
		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
		}
	      PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
							max_addr - rel_addr,
							body));
	      if (JUMP_TABLES_IN_TEXT_SECTION
		  || readonly_data_section == text_section)
		{
		  insn_lengths[uid]
		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
		  insn_current_address += insn_lengths[uid];
		  if (insn_lengths[uid] != old_length)
		    something_changed = 1;
		}

	      continue;
	    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

	  if (! (varying_length[uid]))
	    {
	      if (NONJUMP_INSN_P (insn)
		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  int i;

		  body = PATTERN (insn);
		  for (i = 0; i < XVECLEN (body, 0); i++)
		    {
		      rtx inner_insn = XVECEXP (body, 0, i);
		      int inner_uid = INSN_UID (inner_insn);

		      INSN_ADDRESSES (inner_uid) = insn_current_address;

		      insn_current_address += insn_lengths[inner_uid];
		    }
		}
	      else
		insn_current_address += insn_lengths[uid];

	      continue;
	    }

	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      int i;

	      body = PATTERN (insn);
	      new_length = 0;
	      for (i = 0; i < XVECLEN (body, 0); i++)
		{
		  rtx inner_insn = XVECEXP (body, 0, i);
		  int inner_uid = INSN_UID (inner_insn);
		  int inner_length;

		  INSN_ADDRESSES (inner_uid) = insn_current_address;

		  /* insn_current_length returns 0 for insns with a
		     non-varying length.  */
		  if (! varying_length[inner_uid])
		    inner_length = insn_lengths[inner_uid];
		  else
		    inner_length = insn_current_length (inner_insn);

		  if (inner_length != insn_lengths[inner_uid])
		    {
		      insn_lengths[inner_uid] = inner_length;
		      something_changed = 1;
		    }
		  insn_current_address += insn_lengths[inner_uid];
		  new_length += inner_length;
		}
	    }
	  else
	    {
	      new_length = insn_current_length (insn);
	      insn_current_address += new_length;
	    }

#ifdef ADJUST_INSN_LENGTH
	  /* If needed, do any adjustment.  */
	  tmp_length = new_length;
	  ADJUST_INSN_LENGTH (insn, new_length);
	  insn_current_address += (new_length - tmp_length);
#endif

	  if (new_length != insn_lengths[uid])
	    {
	      insn_lengths[uid] = new_length;
	      something_changed = 1;
	    }
	}
      /* For a non-optimizing compile, do only a single pass.  */
      if (!optimize)
	break;
    }

  free (varying_length);

#endif /* HAVE_ATTR_length */
}

#ifdef HAVE_ATTR_length
/* Given the body of an INSN known to be generated by an ASM statement, return
   the number of machine instructions likely to be generated for this insn.
   This is used to compute its length.  */
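/* For instance, a hypothetical asm template such as "mov %1,%0; add %2,%0"
   contains one ';' separator (with the default IS_ASM_LOGICAL_LINE_SEPARATOR)
   and no newlines, so it is counted as two machine instructions.  */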

static int
asm_insn_count (rtx body)
{
  const char *template;
  int count = 1;

  if (GET_CODE (body) == ASM_INPUT)
    template = XSTR (body, 0);
  else
    template = decode_asm_operands (body, NULL, NULL, NULL, NULL);

  for (; *template; template++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
      count++;

  return count;
}
#endif

/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   OPTIMIZE is nonzero if we should eliminate redundant
     test and compare insns.  */

void
final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
		      int optimize ATTRIBUTE_UNUSED)
{
  block_depth = 0;

  this_is_asm_operands = 0;

  last_filename = locator_file (prologue_locator);
  last_linenum = locator_line (prologue_locator);

  high_block_linenum = high_function_linenum = last_linenum;

  (*debug_hooks->begin_prologue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
    dwarf2out_begin_prologue (0, NULL);
#endif

#ifdef LEAF_REG_REMAP
  if (current_function_uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
#ifdef PROFILE_BEFORE_PROLOGUE
  if (current_function_profile)
    profile_function (file);
#endif /* PROFILE_BEFORE_PROLOGUE */

#if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
  if (dwarf2out_do_frame ())
    dwarf2out_frame_debug (NULL_RTX, false);
#endif

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  if (warn_frame_larger_than
      && get_frame_size () > frame_larger_than_size)
    {
      /* Issue a warning.  */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wd bytes is larger than %wd bytes",
	       get_frame_size (), frame_larger_than_size);
    }

  /* First output the function prologue: code to set up the stack frame.  */
  targetm.asm_out.function_prologue (file, get_frame_size ());

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
#ifdef HAVE_prologue
  if (! HAVE_prologue)
#endif
    profile_after_prologue (file);
}

static void
profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef PROFILE_BEFORE_PROLOGUE
  if (current_function_profile)
    profile_function (file);
#endif /* not PROFILE_BEFORE_PROLOGUE */
}

static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS	0
#endif
#if defined(ASM_OUTPUT_REG_PUSH)
  int sval = current_function_returns_struct;
  rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
#if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
  int cxt = cfun->static_chain_decl != NULL;
#endif
#endif /* ASM_OUTPUT_REG_PUSH */

  if (! NO_PROFILE_COUNTERS)
    {
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
      switch_to_section (data_section);
      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  switch_to_section (current_function_section ());

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
    ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
#endif

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    {
      ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
    }
#endif
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    {
      ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
    }
#endif
#endif

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
    ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
#endif
}

/* Output assembler code for the end of a function.
   For clarity, args are same as those of `final_start_function'
   even though not all of them are needed.  */

void
final_end_function (void)
{
  app_disable ();

  (*debug_hooks->end_function) (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());

  /* And debug output.  */
  (*debug_hooks->end_epilogue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);
#endif
}

/* Output assembler code for some insns: all or part of a function.
   For description of args, see `final_start_function', above.  */

void
final (rtx first, FILE *file, int optimize)
{
  rtx insn;
  int max_uid = 0;
  int seen = 0;

  last_ignored_compare = 0;

#ifdef SDB_DEBUGGING_INFO
  /* When producing SDB debugging info, delete troublesome line number
     notes from inlined functions in other files as well as duplicate
     line number notes.  */
  if (write_symbols == SDB_DEBUG)
    {
      rtx last = 0;
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
	  {
	    if (last != 0
#ifdef USE_MAPPED_LOCATION
		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last)
#else
		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)
#endif
	      )
	      {
		delete_insn (insn);	/* Use delete_note.  */
		continue;
	      }
	    last = insn;
	  }
    }
#endif

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)       /* Find largest UID.  */
	max_uid = INSN_UID (insn);
#ifdef HAVE_cc0
      /* If CC tracking across branches is enabled, record the insn which
	 jumps to each branch only reached from one place.  */
      if (optimize && JUMP_P (insn))
	{
	  rtx lab = JUMP_LABEL (insn);
	  if (lab && LABEL_NUSES (lab) == 1)
	    {
	      LABEL_REFS (lab) = insn;
	    }
	}
#endif
    }

  init_recog ();

  CC_STATUS_INIT;

  /* Output the insns.  */
  for (insn = NEXT_INSN (first); insn;)
    {
#ifdef HAVE_ATTR_length
      if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
	{
	  /* This can be triggered by bugs elsewhere in the compiler if
	     new insns are created after init_insn_lengths is called.  */
	  gcc_assert (NOTE_P (insn));
	  insn_current_address = -1;
	}
      else
	insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
#endif /* HAVE_ATTR_length */

      insn = final_scan_insn (insn, file, optimize, 0, &seen);
    }
}

const char *
get_insn_template (int code, rtx insn)
{
  switch (insn_data[code].output_format)
    {
    case INSN_OUTPUT_FORMAT_SINGLE:
      return insn_data[code].output.single;
    case INSN_OUTPUT_FORMAT_MULTI:
      return insn_data[code].output.multi[which_alternative];
    case INSN_OUTPUT_FORMAT_FUNCTION:
      gcc_assert (insn);
      return (*insn_data[code].output.function) (recog_data.operand, insn);

    default:
      gcc_unreachable ();
    }
}

/* Emit the appropriate declaration for an alternate-entry-point
   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
   LABEL_KIND != LABEL_NORMAL.

   The case fall-through in this function is intentional.  */
static void
output_alternate_entry_point (FILE *file, rtx insn)
{
  const char *name = LABEL_NAME (insn);

  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
#endif
    case LABEL_GLOBAL_ENTRY:
      targetm.asm_out.globalize_label (file, name);
    case LABEL_STATIC_ENTRY:
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
      ASM_OUTPUT_LABEL (file, name);
      break;

    case LABEL_NORMAL:
    default:
      gcc_unreachable ();
    }
}

/* The final scan for one insn, INSN.
   Args are same as in `final', except that INSN
   is the insn being scanned.
   Value returned is the next insn to be scanned.

   NOPEEPHOLES is the flag to disallow peephole processing (currently
   used within delayed branch sequence output).

   SEEN is used to track the end of the prologue, for emitting
   debug information.  We force the emission of a line note after
   both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
   at the beginning of the second basic block, whichever comes
   first.  */

rtx
final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
{
#ifdef HAVE_cc0
  rtx set;
#endif
  rtx next;

  insn_counter++;

  /* Ignore deleted insns.  These can occur when we split insns (due to a
     template of "#") while not optimizing.  */
  if (INSN_DELETED_P (insn))
    return NEXT_INSN (insn);

  switch (GET_CODE (insn))
    {
    case NOTE:
      switch (NOTE_LINE_NUMBER (insn))
	{
	case NOTE_INSN_DELETED:
	case NOTE_INSN_FUNCTION_END:
	case NOTE_INSN_REPEATED_LINE_NUMBER:
	case NOTE_INSN_EXPECTED_VALUE:
	  break;

	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	  in_cold_section_p = !in_cold_section_p;
	  (*debug_hooks->switch_text_section) ();
	  switch_to_section (current_function_section ());
	  break;

	case NOTE_INSN_BASIC_BLOCK:
#ifdef TARGET_UNWIND_INFO
	  targetm.asm_out.unwind_emit (asm_out_file, insn);
#endif

	  if (flag_debug_asm)
	    fprintf (asm_out_file, "\t%s basic block %d\n",
		     ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);

	  if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_BB;

	  break;

	case NOTE_INSN_EH_REGION_BEG:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_EH_REGION_END:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_PROLOGUE_END:
	  targetm.asm_out.function_end_prologue (file);
	  profile_after_prologue (file);

	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_EPILOGUE_BEG:
	  targetm.asm_out.function_begin_epilogue (file);
	  break;

	case NOTE_INSN_FUNCTION_BEG:
	  app_disable ();
	  (*debug_hooks->end_prologue) (last_linenum, last_filename);

	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_BLOCK_BEG:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();
	      ++block_depth;
	      high_block_linenum = last_linenum;

	      /* Output debugging info about the symbol-block beginning.  */
	      (*debug_hooks->begin_block) (last_linenum, n);

	      /* Mark this block as output.  */
	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
	    }
	  break;

	case NOTE_INSN_BLOCK_END:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();

	      /* End of a symbol-block.  */
	      --block_depth;
	      gcc_assert (block_depth >= 0);

	      (*debug_hooks->end_block) (high_block_linenum, n);
	    }
	  break;

	case NOTE_INSN_DELETED_LABEL:
1822	  /* Emit the label.  We may have deleted the CODE_LABEL because
1823	     the label could be proved to be unreachable, though still
1824	     referenced (in the form of having its address taken).  */
1825	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1826	  break;
1827
1828	case NOTE_INSN_VAR_LOCATION:
1829	  (*debug_hooks->var_location) (insn);
1830	  break;
1831
1832	case 0:
1833	  break;
1834
1835	default:
1836	  gcc_assert (NOTE_LINE_NUMBER (insn) > 0);
1837	  break;
1838	}
1839      break;
1840
1841    case BARRIER:
1842#if defined (DWARF2_UNWIND_INFO)
1843      if (dwarf2out_do_frame ())
1844	dwarf2out_frame_debug (insn, false);
1845#endif
1846      break;
1847
1848    case CODE_LABEL:
1849      /* The target port might emit labels in the output function for
1850	 some insn, e.g. sh.c output_branchy_insn.  */
1851      if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1852	{
1853	  int align = LABEL_TO_ALIGNMENT (insn);
1854#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1855	  int max_skip = LABEL_TO_MAX_SKIP (insn);
1856#endif
1857
1858	  if (align && NEXT_INSN (insn))
1859	    {
1860#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1861	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1862#else
1863#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1864              ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1865#else
1866	      ASM_OUTPUT_ALIGN (file, align);
1867#endif
1868#endif
1869	    }
1870	}
1871#ifdef HAVE_cc0
1872      CC_STATUS_INIT;
1873      /* If this label is reached from only one place, set the condition
1874	 codes from the instruction just before the branch.  */
1875
1876      /* Disabled because some insns set cc_status in the C output code
1877	 and NOTICE_UPDATE_CC alone can set incorrect status.  */
1878      if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1879	{
1880	  rtx jump = LABEL_REFS (insn);
1881	  rtx barrier = prev_nonnote_insn (insn);
1882	  rtx prev;
1883	  /* If the LABEL_REFS field of this label has been set to point
1884	     at a branch, the predecessor of the branch is a regular
1885	     insn, and that branch is the only way to reach this label,
1886	     set the condition codes based on the branch and its
1887	     predecessor.  */
1888	  if (barrier && BARRIER_P (barrier)
1889	      && jump && JUMP_P (jump)
1890	      && (prev = prev_nonnote_insn (jump))
1891	      && NONJUMP_INSN_P (prev))
1892	    {
1893	      NOTICE_UPDATE_CC (PATTERN (prev), prev);
1894	      NOTICE_UPDATE_CC (PATTERN (jump), jump);
1895	    }
1896	}
1897#endif
1898
1899      if (LABEL_NAME (insn))
1900	(*debug_hooks->label) (insn);
1901
1902      if (app_on)
1903	{
1904	  fputs (ASM_APP_OFF, file);
1905	  app_on = 0;
1906	}
1907
1908      next = next_nonnote_insn (insn);
1909      if (next != 0 && JUMP_P (next))
1910	{
1911	  rtx nextbody = PATTERN (next);
1912
1913	  /* If this label is followed by a jump-table,
1914	     make sure we put the label in the read-only section.  Also
1915	     possibly write the label and jump table together.  */
1916
1917	  if (GET_CODE (nextbody) == ADDR_VEC
1918	      || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1919	    {
1920#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1921	      /* In this case, the case vector is being moved by the
1922		 target, so don't output the label at all.  Leave that
1923		 to the back end macros.  */
1924#else
1925	      if (! JUMP_TABLES_IN_TEXT_SECTION)
1926		{
1927		  int log_align;
1928
1929		  switch_to_section (targetm.asm_out.function_rodata_section
1930				     (current_function_decl));
1931
1932#ifdef ADDR_VEC_ALIGN
1933		  log_align = ADDR_VEC_ALIGN (next);
1934#else
1935		  log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1936#endif
1937		  ASM_OUTPUT_ALIGN (file, log_align);
1938		}
1939	      else
1940		switch_to_section (current_function_section ());
1941
1942#ifdef ASM_OUTPUT_CASE_LABEL
1943	      ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1944				     next);
1945#else
1946	      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1947#endif
1948#endif
1949	      break;
1950	    }
1951	}
1952      if (LABEL_ALT_ENTRY_P (insn))
1953	output_alternate_entry_point (file, insn);
1954      else
1955	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1956      break;
1957
1958    default:
1959      {
1960	rtx body = PATTERN (insn);
1961	int insn_code_number;
1962	const char *template;
1963
1964#ifdef HAVE_conditional_execution
1965	/* Reset this early so it is correct for ASM statements.  */
1966	current_insn_predicate = NULL_RTX;
1967#endif
1968	/* An INSN, JUMP_INSN or CALL_INSN.
1969	   First check for special kinds that recog doesn't recognize.  */
1970
1971	if (GET_CODE (body) == USE /* These are just declarations.  */
1972	    || GET_CODE (body) == CLOBBER)
1973	  break;
1974
1975#ifdef HAVE_cc0
1976	{
1977	  /* If there is a REG_CC_SETTER note on this insn, it means that
1978	     the setting of the condition code was done in the delay slot
1979	     of the insn that branched here.  So recover the cc status
1980	     from the insn that set it.  */
1981
1982	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1983	  if (note)
1984	    {
1985	      NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1986	      cc_prev_status = cc_status;
1987	    }
1988	}
1989#endif
1990
1991	/* Detect insns that are really jump-tables
1992	   and output them as such.  */
1993
1994	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1995	  {
1996#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1997	    int vlen, idx;
1998#endif
1999
2000	    if (! JUMP_TABLES_IN_TEXT_SECTION)
2001	      switch_to_section (targetm.asm_out.function_rodata_section
2002				 (current_function_decl));
2003	    else
2004	      switch_to_section (current_function_section ());
2005
2006	    if (app_on)
2007	      {
2008		fputs (ASM_APP_OFF, file);
2009		app_on = 0;
2010	      }
2011
2012#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2013	    if (GET_CODE (body) == ADDR_VEC)
2014	      {
2015#ifdef ASM_OUTPUT_ADDR_VEC
2016		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2017#else
2018		gcc_unreachable ();
2019#endif
2020	      }
2021	    else
2022	      {
2023#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2024		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2025#else
2026		gcc_unreachable ();
2027#endif
2028	      }
2029#else
2030	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2031	    for (idx = 0; idx < vlen; idx++)
2032	      {
2033		if (GET_CODE (body) == ADDR_VEC)
2034		  {
2035#ifdef ASM_OUTPUT_ADDR_VEC_ELT
2036		    ASM_OUTPUT_ADDR_VEC_ELT
2037		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2038#else
2039		    gcc_unreachable ();
2040#endif
2041		  }
2042		else
2043		  {
2044#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2045		    ASM_OUTPUT_ADDR_DIFF_ELT
2046		      (file,
2047		       body,
2048		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2049		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2050#else
2051		    gcc_unreachable ();
2052#endif
2053		  }
2054	      }
2055#ifdef ASM_OUTPUT_CASE_END
2056	    ASM_OUTPUT_CASE_END (file,
2057				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2058				 insn);
2059#endif
2060#endif
2061
2062	    switch_to_section (current_function_section ());
2063
2064	    break;
2065	  }
2066	/* Emit a source line note if the source location has changed
2067	   (or a line note is being forced); see notice_source_line.  */
2068	if (notice_source_line (insn))
2069	  {
2070	    (*debug_hooks->source_line) (last_linenum, last_filename);
2071	  }
2072
2073	if (GET_CODE (body) == ASM_INPUT)
2074	  {
2075	    const char *string = XSTR (body, 0);
2076
2077	    /* There's no telling what that did to the condition codes.  */
2078	    CC_STATUS_INIT;
2079
2080	    if (string[0])
2081	      {
2082		if (! app_on)
2083		  {
2084		    fputs (ASM_APP_ON, file);
2085		    app_on = 1;
2086		  }
2087		fprintf (asm_out_file, "\t%s\n", string);
2088	      }
2089	    break;
2090	  }
2091
2092	/* Detect `asm' construct with operands.  */
2093	if (asm_noperands (body) >= 0)
2094	  {
2095	    unsigned int noperands = asm_noperands (body);
2096	    rtx *ops = alloca (noperands * sizeof (rtx));
2097	    const char *string;
2098
2099	    /* There's no telling what that did to the condition codes.  */
2100	    CC_STATUS_INIT;
2101
2102	    /* Get out the operand values.  */
2103	    string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2104	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2105	    insn_noperands = noperands;
2106	    this_is_asm_operands = insn;
2107
2108#ifdef FINAL_PRESCAN_INSN
2109	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2110#endif
2111
2112	    /* Output the insn using them.  */
2113	    if (string[0])
2114	      {
2115		if (! app_on)
2116		  {
2117		    fputs (ASM_APP_ON, file);
2118		    app_on = 1;
2119		  }
2120	        output_asm_insn (string, ops);
2121	      }
2122
2123	    this_is_asm_operands = 0;
2124	    break;
2125	  }
2126
2127	if (app_on)
2128	  {
2129	    fputs (ASM_APP_OFF, file);
2130	    app_on = 0;
2131	  }
2132
2133	if (GET_CODE (body) == SEQUENCE)
2134	  {
2135	    /* A delayed-branch sequence */
2136	    int i;
2137
2138	    final_sequence = body;
2139
2140	    /* Record the delay slots' frame information before the branch.
2141	       This is needed for delayed calls: see execute_cfa_program().  */
2142#if defined (DWARF2_UNWIND_INFO)
2143	    if (dwarf2out_do_frame ())
2144	      for (i = 1; i < XVECLEN (body, 0); i++)
2145		dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2146#endif
2147
2148	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2149	       force the restoration of a comparison that was previously
2150	       thought unnecessary.  If that happens, cancel this sequence
2151	       and cause that insn to be restored.  */
2152
2153	    next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2154	    if (next != XVECEXP (body, 0, 1))
2155	      {
2156		final_sequence = 0;
2157		return next;
2158	      }
2159
2160	    for (i = 1; i < XVECLEN (body, 0); i++)
2161	      {
2162		rtx insn = XVECEXP (body, 0, i);
2163		rtx next = NEXT_INSN (insn);
2164		/* We loop in case any instruction in a delay slot gets
2165		   split.  */
2166		do
2167		  insn = final_scan_insn (insn, file, 0, 1, seen);
2168		while (insn != next);
2169	      }
2170#ifdef DBR_OUTPUT_SEQEND
2171	    DBR_OUTPUT_SEQEND (file);
2172#endif
2173	    final_sequence = 0;
2174
2175	    /* If the insn requiring the delay slot was a CALL_INSN, the
2176	       insns in the delay slot are actually executed before the
2177	       called function.  Hence we don't preserve any CC-setting
2178	       actions in these insns and the CC must be marked as being
2179	       clobbered by the function.  */
2180	    if (CALL_P (XVECEXP (body, 0, 0)))
2181	      {
2182		CC_STATUS_INIT;
2183	      }
2184	    break;
2185	  }
2186
2187	/* We have a real machine instruction as rtl.  */
2188
2189	body = PATTERN (insn);
2190
2191#ifdef HAVE_cc0
2192	set = single_set (insn);
2193
2194	/* Check for redundant test and compare instructions
2195	   (when the condition codes are already set up as desired).
2196	   This is done only when optimizing; if not optimizing,
2197	   it should be possible for the user to alter a variable
2198	   with the debugger in between statements
2199	   and the next statement should reexamine the variable
2200	   to compute the condition codes.  */
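	/* For instance, on a hypothetical cc0 target the sequence

		tst r0		; (set (cc0) (reg r0))
		beq .L1
		tst r0		; SET_SRC matches cc_status.value1,
		bge .L2		; so this second test is not output

	   falls into the "last_ignored_compare" case below on the second
	   test, provided nothing in between clobbered cc0.  */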
2201
2202	if (optimize)
2203	  {
2204	    if (set
2205		&& GET_CODE (SET_DEST (set)) == CC0
2206		&& insn != last_ignored_compare)
2207	      {
2208		if (GET_CODE (SET_SRC (set)) == SUBREG)
2209		  SET_SRC (set) = alter_subreg (&SET_SRC (set));
2210		else if (GET_CODE (SET_SRC (set)) == COMPARE)
2211		  {
2212		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2213		      XEXP (SET_SRC (set), 0)
2214			= alter_subreg (&XEXP (SET_SRC (set), 0));
2215		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2216		      XEXP (SET_SRC (set), 1)
2217			= alter_subreg (&XEXP (SET_SRC (set), 1));
2218		  }
2219		if ((cc_status.value1 != 0
2220		     && rtx_equal_p (SET_SRC (set), cc_status.value1))
2221		    || (cc_status.value2 != 0
2222			&& rtx_equal_p (SET_SRC (set), cc_status.value2)))
2223		  {
2224		    /* Don't delete insn if it has an addressing side-effect.  */
2225		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2226			/* or if anything in it is volatile.  */
2227			&& ! volatile_refs_p (PATTERN (insn)))
2228		      {
2229			/* We don't really delete the insn; just ignore it.  */
2230			last_ignored_compare = insn;
2231			break;
2232		      }
2233		  }
2234	      }
2235	  }
2236#endif
2237
2238#ifdef HAVE_cc0
2239	/* If this is a conditional branch, maybe modify it
2240	   if the cc's are in a nonstandard state
2241	   so that it accomplishes the same thing that it would
2242	   do straightforwardly if the cc's were set up normally.  */
2243
2244	if (cc_status.flags != 0
2245	    && JUMP_P (insn)
2246	    && GET_CODE (body) == SET
2247	    && SET_DEST (body) == pc_rtx
2248	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2249	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2250	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2251	  {
2252	    /* This function may alter the contents of its argument
2253	       and clear some of the cc_status.flags bits.
2254	       It may also return 1 meaning condition now always true
2255	       or -1 meaning condition now always false
2256	       or 2 meaning condition nontrivial but altered.  */
2257	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2258	    /* If condition now has fixed value, replace the IF_THEN_ELSE
2259	       with its then-operand or its else-operand.  */
2260	    if (result == 1)
2261	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
2262	    if (result == -1)
2263	      SET_SRC (body) = XEXP (SET_SRC (body), 2);
2264
2265	    /* The jump is now either unconditional or a no-op.
2266	       If it has become a no-op, don't try to output it.
2267	       (It would not be recognized.)  */
2268	    if (SET_SRC (body) == pc_rtx)
2269	      {
2270	        delete_insn (insn);
2271		break;
2272	      }
2273	    else if (GET_CODE (SET_SRC (body)) == RETURN)
2274	      /* Replace (set (pc) (return)) with (return).  */
2275	      PATTERN (insn) = body = SET_SRC (body);
2276
2277	    /* Rerecognize the instruction if it has changed.  */
2278	    if (result != 0)
2279	      INSN_CODE (insn) = -1;
2280	  }
2281
2282	/* Make same adjustments to instructions that examine the
2283	   condition codes without jumping and instructions that
2284	   handle conditional moves (if this machine has either one).  */
2285
2286	if (cc_status.flags != 0
2287	    && set != 0)
2288	  {
2289	    rtx cond_rtx, then_rtx, else_rtx;
2290
2291	    if (!JUMP_P (insn)
2292		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2293	      {
2294		cond_rtx = XEXP (SET_SRC (set), 0);
2295		then_rtx = XEXP (SET_SRC (set), 1);
2296		else_rtx = XEXP (SET_SRC (set), 2);
2297	      }
2298	    else
2299	      {
2300		cond_rtx = SET_SRC (set);
2301		then_rtx = const_true_rtx;
2302		else_rtx = const0_rtx;
2303	      }
2304
2305	    switch (GET_CODE (cond_rtx))
2306	      {
2307	      case GTU:
2308	      case GT:
2309	      case LTU:
2310	      case LT:
2311	      case GEU:
2312	      case GE:
2313	      case LEU:
2314	      case LE:
2315	      case EQ:
2316	      case NE:
2317		{
2318		  int result;
2319		  if (XEXP (cond_rtx, 0) != cc0_rtx)
2320		    break;
2321		  result = alter_cond (cond_rtx);
2322		  if (result == 1)
2323		    validate_change (insn, &SET_SRC (set), then_rtx, 0);
2324		  else if (result == -1)
2325		    validate_change (insn, &SET_SRC (set), else_rtx, 0);
2326		  else if (result == 2)
2327		    INSN_CODE (insn) = -1;
2328		  if (SET_DEST (set) == SET_SRC (set))
2329		    delete_insn (insn);
2330		}
2331		break;
2332
2333	      default:
2334		break;
2335	      }
2336	  }
2337
2338#endif
2339
2340#ifdef HAVE_peephole
2341	/* Do machine-specific peephole optimizations if desired.  */
2342
2343	if (optimize && !flag_no_peephole && !nopeepholes)
2344	  {
2345	    rtx next = peephole (insn);
2346	    /* When peepholing, if there were notes within the peephole,
2347	       emit them before the peephole.  */
2348	    if (next != 0 && next != NEXT_INSN (insn))
2349	      {
2350		rtx note, prev = PREV_INSN (insn);
2351
2352		for (note = NEXT_INSN (insn); note != next;
2353		     note = NEXT_INSN (note))
2354		  final_scan_insn (note, file, optimize, nopeepholes, seen);
2355
2356		/* Put the notes in the proper position for a later
2357		   rescan.  For example, the SH target can do this
2358		   when generating a far jump in a delayed branch
2359		   sequence.  */
2360		note = NEXT_INSN (insn);
2361		PREV_INSN (note) = prev;
2362		NEXT_INSN (prev) = note;
2363		NEXT_INSN (PREV_INSN (next)) = insn;
2364		PREV_INSN (insn) = PREV_INSN (next);
2365		NEXT_INSN (insn) = next;
2366		PREV_INSN (next) = insn;
2367	      }
2368
2369	    /* PEEPHOLE might have changed this.  */
2370	    body = PATTERN (insn);
2371	  }
2372#endif
2373
2374	/* Try to recognize the instruction.
2375	   If successful, verify that the operands satisfy the
2376	   constraints for the instruction.  Crash if they don't,
2377	   since `reload' should have changed them so that they do.  */
2378
2379	insn_code_number = recog_memoized (insn);
2380	cleanup_subreg_operands (insn);
2381
2382	/* Dump the insn in the assembly for debugging.  */
2383	if (flag_dump_rtl_in_asm)
2384	  {
2385	    print_rtx_head = ASM_COMMENT_START;
2386	    print_rtl_single (asm_out_file, insn);
2387	    print_rtx_head = "";
2388	  }
2389
2390	if (! constrain_operands_cached (1))
2391	  fatal_insn_not_found (insn);
2392
2393	/* Some target machines need to prescan each insn before
2394	   it is output.  */
2395
2396#ifdef FINAL_PRESCAN_INSN
2397	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2398#endif
2399
2400#ifdef HAVE_conditional_execution
2401	if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2402	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2403#endif
2404
2405#ifdef HAVE_cc0
2406	cc_prev_status = cc_status;
2407
2408	/* Update `cc_status' for this instruction.
2409	   The instruction's output routine may change it further.
2410	   If the output routine for a jump insn needs to depend
2411	   on the cc status, it should look at cc_prev_status.  */
2412
2413	NOTICE_UPDATE_CC (body, insn);
2414#endif
2415
2416	current_output_insn = debug_insn = insn;
2417
2418#if defined (DWARF2_UNWIND_INFO)
2419	if (CALL_P (insn) && dwarf2out_do_frame ())
2420	  dwarf2out_frame_debug (insn, false);
2421#endif
2422
2423	/* Find the proper template for this insn.  */
2424	template = get_insn_template (insn_code_number, insn);
2425
2426	/* If the C code returns 0, it means that it is a jump insn
2427	   which follows a deleted test insn, and that test insn
2428	   needs to be reinserted.  */
2429	if (template == 0)
2430	  {
2431	    rtx prev;
2432
2433	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2434
2435	    /* We have already processed the notes between the setter and
2436	       the user.  Make sure we don't process them again; this is
2437	       particularly important if one of the notes is a block
2438	       scope note or an EH note.  */
2439	    for (prev = insn;
2440		 prev != last_ignored_compare;
2441		 prev = PREV_INSN (prev))
2442	      {
2443		if (NOTE_P (prev))
2444		  delete_insn (prev);	/* Use delete_note.  */
2445	      }
2446
2447	    return prev;
2448	  }
2449
2450	/* If the template is the string "#", it means that this insn must
2451	   be split.  */
2452	if (template[0] == '#' && template[1] == '\0')
2453	  {
2454	    rtx new = try_split (body, insn, 0);
2455
2456	    /* If we didn't split the insn, go away.  */
2457	    if (new == insn && PATTERN (new) == body)
2458	      fatal_insn ("could not split insn", insn);
2459
2460#ifdef HAVE_ATTR_length
2461	    /* This instruction should have been split in shorten_branches,
2462	       to ensure that we would have valid length info for the
2463	       split insns.  */
2464	    gcc_unreachable ();
2465#endif
2466
2467	    return new;
2468	  }
2469
2470#ifdef TARGET_UNWIND_INFO
2471	/* ??? This will put the directives in the wrong place if
2472	   get_insn_template outputs assembly directly.  However, calling it
2473	   before get_insn_template breaks if the insn is split.  */
2474	targetm.asm_out.unwind_emit (asm_out_file, insn);
2475#endif
2476
2477	/* Output assembler code from the template.  */
2478	output_asm_insn (template, recog_data.operand);
2479
2480	/* If necessary, report the effect that the instruction has on
2481	   the unwind info.   We've already done this for delay slots
2482	   and call instructions.  */
2483#if defined (DWARF2_UNWIND_INFO)
2484	if (final_sequence == 0
2485#if !defined (HAVE_prologue)
2486	    && !ACCUMULATE_OUTGOING_ARGS
2487#endif
2488	    && dwarf2out_do_frame ())
2489	  dwarf2out_frame_debug (insn, true);
2490#endif
2491
2492	current_output_insn = debug_insn = 0;
2493      }
2494    }
2495  return NEXT_INSN (insn);
2496}
2497
2498/* Return whether a source line note needs to be emitted before INSN.  */
2499
2500static bool
2501notice_source_line (rtx insn)
2502{
2503  const char *filename = insn_file (insn);
2504  int linenum = insn_line (insn);
2505
2506  if (filename
2507      && (force_source_line
2508	  || filename != last_filename
2509	  || last_linenum != linenum))
2510    {
2511      force_source_line = false;
2512      last_filename = filename;
2513      last_linenum = linenum;
2514      high_block_linenum = MAX (last_linenum, high_block_linenum);
2515      high_function_linenum = MAX (last_linenum, high_function_linenum);
2516      return true;
2517    }
2518  return false;
2519}
2520
2521/* For each operand in INSN, simplify (subreg (reg)) so that it refers
2522   directly to the desired hard register.  */
2523
2524void
2525cleanup_subreg_operands (rtx insn)
2526{
2527  int i;
2528  extract_insn_cached (insn);
2529  for (i = 0; i < recog_data.n_operands; i++)
2530    {
2531      /* The following test cannot use recog_data.operand when testing
2532	 for a SUBREG: the underlying object might have been changed
2533	 already if we are inside a match_operator expression that
2534	 matches the else clause.  Instead we test the underlying
2535	 expression directly.  */
2536      if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2537	recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2538      else if (GET_CODE (recog_data.operand[i]) == PLUS
2539	       || GET_CODE (recog_data.operand[i]) == MULT
2540	       || MEM_P (recog_data.operand[i]))
2541	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2542    }
2543
2544  for (i = 0; i < recog_data.n_dups; i++)
2545    {
2546      if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2547	*recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2548      else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2549	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
2550	       || MEM_P (*recog_data.dup_loc[i]))
2551	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2552    }
2553}
2554
2555/* If X is a SUBREG, replace it with a REG or a MEM,
2556   based on the thing it is a subreg of.  */
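/* For example, (subreg:SI (reg:DI 2) 4) becomes (reg:SI 3) on a
   hypothetical target whose 32-bit hard registers 2 and 3 hold the
   DImode value, while (subreg:HI (mem:SI <addr>) 2) becomes a HImode
   MEM at <addr> plus 2, via adjust_address.  */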
2557
2558rtx
2559alter_subreg (rtx *xp)
2560{
2561  rtx x = *xp;
2562  rtx y = SUBREG_REG (x);
2563
2564  /* simplify_subreg does not remove subreg from volatile references.
2565     We are required to.  */
2566  if (MEM_P (y))
2567    {
2568      int offset = SUBREG_BYTE (x);
2569
2570      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2571	 contains 0 instead of the proper offset.  See simplify_subreg.  */
2572      if (offset == 0
2573	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2574        {
2575          int difference = GET_MODE_SIZE (GET_MODE (y))
2576			   - GET_MODE_SIZE (GET_MODE (x));
2577          if (WORDS_BIG_ENDIAN)
2578            offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2579          if (BYTES_BIG_ENDIAN)
2580            offset += difference % UNITS_PER_WORD;
2581        }
2582
2583      *xp = adjust_address (y, GET_MODE (x), offset);
2584    }
2585  else
2586    {
2587      rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2588				 SUBREG_BYTE (x));
2589
2590      if (new != 0)
2591	*xp = new;
2592      else if (REG_P (y))
2593	{
2594	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
2595	  unsigned int regno = subreg_regno (x);
2596	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2597	}
2598    }
2599
2600  return *xp;
2601}
2602
2603/* Do alter_subreg on all the SUBREGs contained in X.  */
2604
2605static rtx
2606walk_alter_subreg (rtx *xp)
2607{
2608  rtx x = *xp;
2609  switch (GET_CODE (x))
2610    {
2611    case PLUS:
2612    case MULT:
2613    case AND:
2614      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2615      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2616      break;
2617
2618    case MEM:
2619    case ZERO_EXTEND:
2620      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2621      break;
2622
2623    case SUBREG:
2624      return alter_subreg (xp);
2625
2626    default:
2627      break;
2628    }
2629
2630  return *xp;
2631}
2632
2633#ifdef HAVE_cc0
2634
2635/* Given BODY, the body of a jump instruction, alter the jump condition
2636   as required by the bits that are set in cc_status.flags.
2637   Not all of the bits there can be handled at this level in all cases.
2638
2639   The value is normally 0.
2640   1 means that the condition has become always true.
2641   -1 means that the condition has become always false.
2642   2 means that COND has been altered.  */
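/* For example, with CC_REVERSED set, (gt (cc0) (const_int 0)) is
   rewritten in place to (lt (cc0) (const_int 0)) and 2 is returned;
   with CC_NOT_NEGATIVE set, a GE or GEU test simply returns 1, since
   the branch has become unconditional.  */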
2643
2644static int
2645alter_cond (rtx cond)
2646{
2647  int value = 0;
2648
2649  if (cc_status.flags & CC_REVERSED)
2650    {
2651      value = 2;
2652      PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2653    }
2654
2655  if (cc_status.flags & CC_INVERTED)
2656    {
2657      value = 2;
2658      PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2659    }
2660
2661  if (cc_status.flags & CC_NOT_POSITIVE)
2662    switch (GET_CODE (cond))
2663      {
2664      case LE:
2665      case LEU:
2666      case GEU:
2667	/* Jump becomes unconditional.  */
2668	return 1;
2669
2670      case GT:
2671      case GTU:
2672      case LTU:
2673	/* Jump becomes no-op.  */
2674	return -1;
2675
2676      case GE:
2677	PUT_CODE (cond, EQ);
2678	value = 2;
2679	break;
2680
2681      case LT:
2682	PUT_CODE (cond, NE);
2683	value = 2;
2684	break;
2685
2686      default:
2687	break;
2688      }
2689
2690  if (cc_status.flags & CC_NOT_NEGATIVE)
2691    switch (GET_CODE (cond))
2692      {
2693      case GE:
2694      case GEU:
2695	/* Jump becomes unconditional.  */
2696	return 1;
2697
2698      case LT:
2699      case LTU:
2700	/* Jump becomes no-op.  */
2701	return -1;
2702
2703      case LE:
2704      case LEU:
2705	PUT_CODE (cond, EQ);
2706	value = 2;
2707	break;
2708
2709      case GT:
2710      case GTU:
2711	PUT_CODE (cond, NE);
2712	value = 2;
2713	break;
2714
2715      default:
2716	break;
2717      }
2718
2719  if (cc_status.flags & CC_NO_OVERFLOW)
2720    switch (GET_CODE (cond))
2721      {
2722      case GEU:
2723	/* Jump becomes unconditional.  */
2724	return 1;
2725
2726      case LEU:
2727	PUT_CODE (cond, EQ);
2728	value = 2;
2729	break;
2730
2731      case GTU:
2732	PUT_CODE (cond, NE);
2733	value = 2;
2734	break;
2735
2736      case LTU:
2737	/* Jump becomes no-op.  */
2738	return -1;
2739
2740      default:
2741	break;
2742      }
2743
2744  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2745    switch (GET_CODE (cond))
2746      {
2747      default:
2748	gcc_unreachable ();
2749
2750      case NE:
2751	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2752	value = 2;
2753	break;
2754
2755      case EQ:
2756	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2757	value = 2;
2758	break;
2759      }
2760
2761  if (cc_status.flags & CC_NOT_SIGNED)
2762    /* The flags are valid if signed condition operators are converted
2763       to unsigned.  */
2764    switch (GET_CODE (cond))
2765      {
2766      case LE:
2767	PUT_CODE (cond, LEU);
2768	value = 2;
2769	break;
2770
2771      case LT:
2772	PUT_CODE (cond, LTU);
2773	value = 2;
2774	break;
2775
2776      case GT:
2777	PUT_CODE (cond, GTU);
2778	value = 2;
2779	break;
2780
2781      case GE:
2782	PUT_CODE (cond, GEU);
2783	value = 2;
2784	break;
2785
2786      default:
2787	break;
2788      }
2789
2790  return value;
2791}
2792#endif
2793
2794/* Report inconsistency between the assembler template and the operands.
2795   In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
2796
2797void
2798output_operand_lossage (const char *cmsgid, ...)
2799{
2800  char *fmt_string;
2801  char *new_message;
2802  const char *pfx_str;
2803  va_list ap;
2804
2805  va_start (ap, cmsgid);
2806
2807  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
2808  asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
2809  vasprintf (&new_message, fmt_string, ap);
2810
2811  if (this_is_asm_operands)
2812    error_for_asm (this_is_asm_operands, "%s", new_message);
2813  else
2814    internal_error ("%s", new_message);
2815
2816  free (fmt_string);
2817  free (new_message);
2818  va_end (ap);
2819}
2820
2821/* Output of assembler code from a template, and its subroutines.  */
2822
2823/* Annotate the assembly with a comment describing the pattern and
2824   alternative used.  */
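/* For instance, with -dp an x86 add might be annotated roughly as

	addl %edx, %eax	# 23	*addsi_1/2	[length = 2]

   i.e. the insn UID, the pattern name, the constraint alternative that
   was chosen, and (when lengths are computed) the insn length.  */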
2825
2826static void
2827output_asm_name (void)
2828{
2829  if (debug_insn)
2830    {
2831      int num = INSN_CODE (debug_insn);
2832      fprintf (asm_out_file, "\t%s %d\t%s",
2833	       ASM_COMMENT_START, INSN_UID (debug_insn),
2834	       insn_data[num].name);
2835      if (insn_data[num].n_alternatives > 1)
2836	fprintf (asm_out_file, "/%d", which_alternative + 1);
2837#ifdef HAVE_ATTR_length
2838      fprintf (asm_out_file, "\t[length = %d]",
2839	       get_attr_length (debug_insn));
2840#endif
2841      /* Clear this so only the first assembler insn
2842	 of any rtl insn will get the special comment for -dp.  */
2843      debug_insn = 0;
2844    }
2845}
2846
2847/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2848   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
2849   corresponds to the address of the object and 0 if to the object.  */
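/* For example, if OP is (mem:SI (plus:SI (reg:SI 6) (const_int -8)))
   and its MEM_EXPR is the VAR_DECL for a local "x", that decl is
   returned with *PADDRESSP == 0; if instead only the address expression
   can be traced back to a decl, *PADDRESSP is set to 1.  */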
2850
2851static tree
2852get_mem_expr_from_op (rtx op, int *paddressp)
2853{
2854  tree expr;
2855  int inner_addressp;
2856
2857  *paddressp = 0;
2858
2859  if (REG_P (op))
2860    return REG_EXPR (op);
2861  else if (!MEM_P (op))
2862    return 0;
2863
2864  if (MEM_EXPR (op) != 0)
2865    return MEM_EXPR (op);
2866
2867  /* Otherwise we have an address, so indicate it and look at the address.  */
2868  *paddressp = 1;
2869  op = XEXP (op, 0);
2870
2871  /* First check if we have a decl for the address, then look at the right side
2872     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
2873     But don't allow the address itself to be indirect.  */
2874  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2875    return expr;
2876  else if (GET_CODE (op) == PLUS
2877	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2878    return expr;
2879
2880  while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
2881	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
2882    op = XEXP (op, 0);
2883
2884  expr = get_mem_expr_from_op (op, &inner_addressp);
2885  return inner_addressp ? 0 : expr;
2886}
2887
2888/* Output operand names for assembler instructions.  OPERANDS is the
2889   operand vector, OPORDER is the order to write the operands, and NOPS
2890   is the number of operands to write.  */
2891
2892static void
2893output_asm_operand_names (rtx *operands, int *oporder, int nops)
2894{
2895  int wrote = 0;
2896  int i;
2897
2898  for (i = 0; i < nops; i++)
2899    {
2900      int addressp;
2901      rtx op = operands[oporder[i]];
2902      tree expr = get_mem_expr_from_op (op, &addressp);
2903
2904      fprintf (asm_out_file, "%c%s",
2905	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2906      wrote = 1;
2907      if (expr)
2908	{
2909	  fprintf (asm_out_file, "%s",
2910		   addressp ? "*" : "");
2911	  print_mem_expr (asm_out_file, expr);
2912	  wrote = 1;
2913	}
2914      else if (REG_P (op) && ORIGINAL_REGNO (op)
2915	       && ORIGINAL_REGNO (op) != REGNO (op))
2916	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2917    }
2918}
2919
2920/* Output text from TEMPLATE to the assembler output file,
2921   obeying %-directions to substitute operands taken from
2922   the vector OPERANDS.
2923
2924   %N (for N a digit) means print operand N in usual manner.
2925   %lN means require operand N to be a CODE_LABEL or LABEL_REF
2926      and print the label name with no punctuation.
2927   %cN means require operand N to be a constant
2928      and print the constant expression with no punctuation.
2929   %aN means expect operand N to be a memory address
2930      (not a memory reference!) and print a reference
2931      to that address.
2932   %nN means expect operand N to be a constant
2933      and print a constant expression for minus the value
2934      of the operand, with no other punctuation.  */
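/* As an illustration, the (hypothetical) template

	"move %1,%0\n\tjne %l2"

   with operands[0] = (reg:SI 0), operands[1] = (mem:SI (reg:SI 8)) and
   operands[2] a LABEL_REF for label number 5 might come out, depending
   on the target's PRINT_OPERAND and LOCAL_LABEL_PREFIX, roughly as

	move (r8),r0
	jne .L5
   */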
2935
2936void
2937output_asm_insn (const char *template, rtx *operands)
2938{
2939  const char *p;
2940  int c;
2941#ifdef ASSEMBLER_DIALECT
2942  int dialect = 0;
2943#endif
2944  int oporder[MAX_RECOG_OPERANDS];
2945  char opoutput[MAX_RECOG_OPERANDS];
2946  int ops = 0;
2947
2948  /* An insn may return a null string template
2949     in a case where no assembler code is needed.  */
2950  if (*template == 0)
2951    return;
2952
2953  memset (opoutput, 0, sizeof opoutput);
2954  p = template;
2955  putc ('\t', asm_out_file);
2956
2957#ifdef ASM_OUTPUT_OPCODE
2958  ASM_OUTPUT_OPCODE (asm_out_file, p);
2959#endif
2960
2961  while ((c = *p++))
2962    switch (c)
2963      {
2964      case '\n':
2965	if (flag_verbose_asm)
2966	  output_asm_operand_names (operands, oporder, ops);
2967	if (flag_print_asm_name)
2968	  output_asm_name ();
2969
2970	ops = 0;
2971	memset (opoutput, 0, sizeof opoutput);
2972
2973	putc (c, asm_out_file);
2974#ifdef ASM_OUTPUT_OPCODE
2975	while ((c = *p) == '\t')
2976	  {
2977	    putc (c, asm_out_file);
2978	    p++;
2979	  }
2980	ASM_OUTPUT_OPCODE (asm_out_file, p);
2981#endif
2982	break;
2983
2984#ifdef ASSEMBLER_DIALECT
2985      case '{':
2986	{
2987	  int i;
2988
2989	  if (dialect)
2990	    output_operand_lossage ("nested assembly dialect alternatives");
2991	  else
2992	    dialect = 1;
2993
2994	  /* If we want the first dialect, do nothing.  Otherwise, skip
2995	     DIALECT_NUMBER of strings ending with '|'.  */
2996	  for (i = 0; i < dialect_number; i++)
2997	    {
2998	      while (*p && *p != '}' && *p++ != '|')
2999		;
3000	      if (*p == '}')
3001		break;
3002	      if (*p == '|')
3003		p++;
3004	    }
3005
3006	  if (*p == '\0')
3007	    output_operand_lossage ("unterminated assembly dialect alternative");
3008	}
3009	break;
3010
3011      case '|':
3012	if (dialect)
3013	  {
3014	    /* Skip to close brace.  */
3015	    do
3016	      {
3017		if (*p == '\0')
3018		  {
3019		    output_operand_lossage ("unterminated assembly dialect alternative");
3020		    break;
3021		  }
3022	      }
3023	    while (*p++ != '}');
3024	    dialect = 0;
3025	  }
3026	else
3027	  putc (c, asm_out_file);
3028	break;
3029
3030      case '}':
3031	if (! dialect)
3032	  putc (c, asm_out_file);
3033	dialect = 0;
3034	break;
3035#endif
3036
3037      case '%':
3038	/* %% outputs a single %.  */
3039	if (*p == '%')
3040	  {
3041	    p++;
3042	    putc (c, asm_out_file);
3043	  }
3044	/* %= outputs a number which is unique to each insn in the entire
3045	   compilation.  This is useful for making local labels that are
3046	   referred to more than once in a given insn.  */
3047	else if (*p == '=')
3048	  {
3049	    p++;
3050	    fprintf (asm_out_file, "%d", insn_counter);
3051	  }
3052	/* % followed by a letter and some digits
3053	   outputs an operand in a special way depending on the letter.
3054	   Letters `acln' are implemented directly.
3055	   Other letters are passed to `output_operand' so that
3056	   the PRINT_OPERAND macro can define them.  */
3057	else if (ISALPHA (*p))
3058	  {
3059	    int letter = *p++;
3060	    unsigned long opnum;
3061	    char *endptr;
3062
3063	    opnum = strtoul (p, &endptr, 10);
3064
3065	    if (endptr == p)
3066	      output_operand_lossage ("operand number missing "
3067				      "after %%-letter");
3068	    else if (this_is_asm_operands && opnum >= insn_noperands)
3069	      output_operand_lossage ("operand number out of range");
3070	    else if (letter == 'l')
3071	      output_asm_label (operands[opnum]);
3072	    else if (letter == 'a')
3073	      output_address (operands[opnum]);
3074	    else if (letter == 'c')
3075	      {
3076		if (CONSTANT_ADDRESS_P (operands[opnum]))
3077		  output_addr_const (asm_out_file, operands[opnum]);
3078		else
3079		  output_operand (operands[opnum], 'c');
3080	      }
3081	    else if (letter == 'n')
3082	      {
3083		if (GET_CODE (operands[opnum]) == CONST_INT)
3084		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3085			   - INTVAL (operands[opnum]));
3086		else
3087		  {
3088		    putc ('-', asm_out_file);
3089		    output_addr_const (asm_out_file, operands[opnum]);
3090		  }
3091	      }
3092	    else
3093	      output_operand (operands[opnum], letter);
3094
3095	    if (!opoutput[opnum])
3096	      oporder[ops++] = opnum;
3097	    opoutput[opnum] = 1;
3098
3099	    p = endptr;
3100	    c = *p;
3101	  }
3102	/* % followed by a digit outputs an operand the default way.  */
3103	else if (ISDIGIT (*p))
3104	  {
3105	    unsigned long opnum;
3106	    char *endptr;
3107
3108	    opnum = strtoul (p, &endptr, 10);
3109	    if (this_is_asm_operands && opnum >= insn_noperands)
3110	      output_operand_lossage ("operand number out of range");
3111	    else
3112	      output_operand (operands[opnum], 0);
3113
3114	    if (!opoutput[opnum])
3115	      oporder[ops++] = opnum;
3116	    opoutput[opnum] = 1;
3117
3118	    p = endptr;
3119	    c = *p;
3120	  }
3121	/* % followed by punctuation: output something for that
3122	   punctuation character alone, with no operand.
3123	   The PRINT_OPERAND macro decides what is actually done.  */
3124#ifdef PRINT_OPERAND_PUNCT_VALID_P
3125	else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3126	  output_operand (NULL_RTX, *p++);
3127#endif
3128	else
3129	  output_operand_lossage ("invalid %%-code");
3130	break;
3131
3132      default:
3133	putc (c, asm_out_file);
3134      }
3135
3136  /* Write out the variable names for operands, if we know them.  */
3137  if (flag_verbose_asm)
3138    output_asm_operand_names (operands, oporder, ops);
3139  if (flag_print_asm_name)
3140    output_asm_name ();
3141
3142  putc ('\n', asm_out_file);
3143}
3144
3145/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
3146
3147void
3148output_asm_label (rtx x)
3149{
3150  char buf[256];
3151
3152  if (GET_CODE (x) == LABEL_REF)
3153    x = XEXP (x, 0);
3154  if (LABEL_P (x)
3155      || (NOTE_P (x)
3156	  && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3157    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3158  else
3159    output_operand_lossage ("'%%l' operand isn't a label");
3160
3161  assemble_name (asm_out_file, buf);
3162}
3163
3164/* Print operand X using machine-dependent assembler syntax.
3165   The macro PRINT_OPERAND is defined just to control this function.
3166   CODE is a non-digit that preceded the operand-number in the % spec,
3167   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
3168   between the % and the digits.
3169   When CODE is a non-letter, X is 0.
3170
3171   The meanings of the letters are machine-dependent and controlled
3172   by PRINT_OPERAND.  */
3173
3174static void
3175output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3176{
3177  if (x && GET_CODE (x) == SUBREG)
3178    x = alter_subreg (&x);
3179
3180  /* X must not be a pseudo reg.  */
3181  gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3182
3183  PRINT_OPERAND (asm_out_file, x, code);
3184}
3185
3186/* Print a memory reference operand for address X
3187   using machine-dependent assembler syntax.
3188   The macro PRINT_OPERAND_ADDRESS exists just to control this function.  */
3189
3190void
3191output_address (rtx x)
3192{
3193  walk_alter_subreg (&x);
3194  PRINT_OPERAND_ADDRESS (asm_out_file, x);
3195}
3196
3197/* Print an integer constant expression in assembler syntax.
3198   Addition and subtraction are the only arithmetic
3199   that may appear in these expressions.  */
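/* For example, (const:SI (plus:SI (symbol_ref:SI "table") (const_int 8)))
   is printed (given an empty user label prefix) as "table+8", and a
   plain (const_int -4) as "-4".  */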
3200
3201void
3202output_addr_const (FILE *file, rtx x)
3203{
3204  char buf[256];
3205
3206 restart:
3207  switch (GET_CODE (x))
3208    {
3209    case PC:
3210      putc ('.', file);
3211      break;
3212
3213    case SYMBOL_REF:
3214      if (SYMBOL_REF_DECL (x))
3215	mark_decl_referenced (SYMBOL_REF_DECL (x));
3216#ifdef ASM_OUTPUT_SYMBOL_REF
3217      ASM_OUTPUT_SYMBOL_REF (file, x);
3218#else
3219      assemble_name (file, XSTR (x, 0));
3220#endif
3221      break;
3222
3223    case LABEL_REF:
3224      x = XEXP (x, 0);
3225      /* Fall through.  */
3226    case CODE_LABEL:
3227      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3228#ifdef ASM_OUTPUT_LABEL_REF
3229      ASM_OUTPUT_LABEL_REF (file, buf);
3230#else
3231      assemble_name (file, buf);
3232#endif
3233      break;
3234
3235    case CONST_INT:
3236      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3237      break;
3238
3239    case CONST:
3240      /* This used to output parentheses around the expression,
3241	 but that does not work on the 386 (either ATT or BSD assembler).  */
3242      output_addr_const (file, XEXP (x, 0));
3243      break;
3244
3245    case CONST_DOUBLE:
3246      if (GET_MODE (x) == VOIDmode)
3247	{
3248	  /* We can use %d if the number is one word and positive.  */
3249	  if (CONST_DOUBLE_HIGH (x))
3250	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3251		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3252	  else if (CONST_DOUBLE_LOW (x) < 0)
3253	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3254	  else
3255	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3256	}
3257      else
3258	/* We can't handle floating point constants;
3259	   PRINT_OPERAND must handle them.  */
3260	output_operand_lossage ("floating constant misused");
3261      break;
3262
3263    case PLUS:
3264      /* Some assemblers need integer constants to appear last (e.g. masm).  */
3265      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3266	{
3267	  output_addr_const (file, XEXP (x, 1));
3268	  if (INTVAL (XEXP (x, 0)) >= 0)
3269	    fprintf (file, "+");
3270	  output_addr_const (file, XEXP (x, 0));
3271	}
3272      else
3273	{
3274	  output_addr_const (file, XEXP (x, 0));
3275	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
3276	      || INTVAL (XEXP (x, 1)) >= 0)
3277	    fprintf (file, "+");
3278	  output_addr_const (file, XEXP (x, 1));
3279	}
3280      break;
3281
3282    case MINUS:
3283      /* Avoid outputting things like x-x or x+5-x,
3284	 since some assemblers can't handle that.  */
3285      x = simplify_subtraction (x);
3286      if (GET_CODE (x) != MINUS)
3287	goto restart;
3288
3289      output_addr_const (file, XEXP (x, 0));
3290      fprintf (file, "-");
3291      if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3292	  || GET_CODE (XEXP (x, 1)) == PC
3293	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3294	output_addr_const (file, XEXP (x, 1));
3295      else
3296	{
3297	  fputs (targetm.asm_out.open_paren, file);
3298	  output_addr_const (file, XEXP (x, 1));
3299	  fputs (targetm.asm_out.close_paren, file);
3300	}
3301      break;
3302
3303    case ZERO_EXTEND:
3304    case SIGN_EXTEND:
3305    case SUBREG:
3306      output_addr_const (file, XEXP (x, 0));
3307      break;
3308
3309    default:
3310#ifdef OUTPUT_ADDR_CONST_EXTRA
3311      OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3312      break;
3313
3314    fail:
3315#endif
3316      output_operand_lossage ("invalid expression as operand");
3317    }
3318}
3319
3320/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3321   %R prints the value of REGISTER_PREFIX.
3322   %L prints the value of LOCAL_LABEL_PREFIX.
3323   %U prints the value of USER_LABEL_PREFIX.
3324   %I prints the value of IMMEDIATE_PREFIX.
3325   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3326   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3327
3328   We handle alternate assembler dialects here, just like output_asm_insn.  */
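/* For example, assuming REGISTER_PREFIX is "%" and user_label_prefix
   is "_", the call

	asm_fprintf (file, "\tpush %Rsp\n\tcall %U%s\n", "foo");

   would print

	push %sp
	call _foo

   to FILE.  */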
3329
3330void
3331asm_fprintf (FILE *file, const char *p, ...)
3332{
3333  char buf[10];
3334  char *q, c;
3335  va_list argptr;
3336
3337  va_start (argptr, p);
3338
3339  buf[0] = '%';
3340
3341  while ((c = *p++))
3342    switch (c)
3343      {
3344#ifdef ASSEMBLER_DIALECT
3345      case '{':
3346	{
3347	  int i;
3348
3349	  /* If we want the first dialect, do nothing.  Otherwise, skip
3350	     DIALECT_NUMBER of strings ending with '|'.  */
3351	  for (i = 0; i < dialect_number; i++)
3352	    {
3353	      while (*p && *p++ != '|')
3354		;
3355
3356	      if (*p == '|')
3357		p++;
3358	    }
3359	}
3360	break;
3361
3362      case '|':
3363	/* Skip to close brace.  */
3364	while (*p && *p++ != '}')
3365	  ;
3366	break;
3367
3368      case '}':
3369	break;
3370#endif
3371
3372      case '%':
3373	c = *p++;
3374	q = &buf[1];
3375	while (strchr ("-+ #0", c))
3376	  {
3377	    *q++ = c;
3378	    c = *p++;
3379	  }
3380	while (ISDIGIT (c) || c == '.')
3381	  {
3382	    *q++ = c;
3383	    c = *p++;
3384	  }
3385	switch (c)
3386	  {
3387	  case '%':
3388	    putc ('%', file);
3389	    break;
3390
3391	  case 'd':  case 'i':  case 'u':
3392	  case 'x':  case 'X':  case 'o':
3393	  case 'c':
3394	    *q++ = c;
3395	    *q = 0;
3396	    fprintf (file, buf, va_arg (argptr, int));
3397	    break;
3398
3399	  case 'w':
3400	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3401	       'o' cases, but we do not check for those cases.  It
3402	       means that the value is a HOST_WIDE_INT, which may be
3403	       either `long' or `long long'.  */
3404	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3405	    q += strlen (HOST_WIDE_INT_PRINT);
3406	    *q++ = *p++;
3407	    *q = 0;
3408	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3409	    break;
3410
3411	  case 'l':
3412	    *q++ = c;
3413#ifdef HAVE_LONG_LONG
3414	    if (*p == 'l')
3415	      {
3416		*q++ = *p++;
3417		*q++ = *p++;
3418		*q = 0;
3419		fprintf (file, buf, va_arg (argptr, long long));
3420	      }
3421	    else
3422#endif
3423	      {
3424		*q++ = *p++;
3425		*q = 0;
3426		fprintf (file, buf, va_arg (argptr, long));
3427	      }
3428
3429	    break;
3430
3431	  case 's':
3432	    *q++ = c;
3433	    *q = 0;
3434	    fprintf (file, buf, va_arg (argptr, char *));
3435	    break;
3436
3437	  case 'O':
3438#ifdef ASM_OUTPUT_OPCODE
3439	    ASM_OUTPUT_OPCODE (asm_out_file, p);
3440#endif
3441	    break;
3442
3443	  case 'R':
3444#ifdef REGISTER_PREFIX
3445	    fprintf (file, "%s", REGISTER_PREFIX);
3446#endif
3447	    break;
3448
3449	  case 'I':
3450#ifdef IMMEDIATE_PREFIX
3451	    fprintf (file, "%s", IMMEDIATE_PREFIX);
3452#endif
3453	    break;
3454
3455	  case 'L':
3456#ifdef LOCAL_LABEL_PREFIX
3457	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3458#endif
3459	    break;
3460
3461	  case 'U':
3462	    fputs (user_label_prefix, file);
3463	    break;
3464
3465#ifdef ASM_FPRINTF_EXTENSIONS
3466	    /* Uppercase letters are reserved for general use by asm_fprintf
3467	       and so are not available to target specific code.  In order to
3468	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3469	       they are defined here.  As they get turned into real extensions
3470	       to asm_fprintf they should be removed from this list.  */
3471	  case 'A': case 'B': case 'C': case 'D': case 'E':
3472	  case 'F': case 'G': case 'H': case 'J': case 'K':
3473	  case 'M': case 'N': case 'P': case 'Q': case 'S':
3474	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
3475	    break;
3476
3477	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3478#endif
3479	  default:
3480	    gcc_unreachable ();
3481	  }
3482	break;
3483
3484      default:
3485	putc (c, file);
3486      }
3487  va_end (argptr);
3488}
3489
3490/* Split up a CONST_DOUBLE or integer constant rtx
3491   into two rtx's for single words,
3492   storing in *FIRST the word that comes first in memory in the target
3493   and in *SECOND the other.  */
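/* For instance, assuming 32-bit words and a 64-bit HOST_WIDE_INT, the
   constant (const_int 0x100000002) is split into the word-sized pieces
   1 and 2; on a WORDS_BIG_ENDIAN target *FIRST gets 1 and *SECOND gets
   2, and on a little-endian target the other way around.  */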
3494
3495void
3496split_double (rtx value, rtx *first, rtx *second)
3497{
3498  if (GET_CODE (value) == CONST_INT)
3499    {
3500      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3501	{
3502	  /* In this case the CONST_INT holds both target words.
3503	     Extract the bits from it into two word-sized pieces.
3504	     Sign extend each half to HOST_WIDE_INT.  */
3505	  unsigned HOST_WIDE_INT low, high;
3506	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3507
3508	  /* Set sign_bit to the most significant bit of a word.  */
3509	  sign_bit = 1;
3510	  sign_bit <<= BITS_PER_WORD - 1;
3511
3512	  /* Set mask so that all bits of the word are set.  We could
3513	     have used 1 << BITS_PER_WORD instead of basing the
3514	     calculation on sign_bit.  However, on machines where
3515	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3516	     compiler warning, even though the code would never be
3517	     executed.  */
3518	  mask = sign_bit << 1;
3519	  mask--;
3520
3521	  /* Set sign_extend as any remaining bits.  */
3522	  sign_extend = ~mask;
3523
3524	  /* Pick the lower word and sign-extend it.  */
3525	  low = INTVAL (value);
3526	  low &= mask;
3527	  if (low & sign_bit)
3528	    low |= sign_extend;
3529
3530	  /* Pick the higher word, shifted to the least significant
3531	     bits, and sign-extend it.  */
3532	  high = INTVAL (value);
3533	  high >>= BITS_PER_WORD - 1;
3534	  high >>= 1;
3535	  high &= mask;
3536	  if (high & sign_bit)
3537	    high |= sign_extend;
3538
3539	  /* Store the words in the target machine order.  */
3540	  if (WORDS_BIG_ENDIAN)
3541	    {
3542	      *first = GEN_INT (high);
3543	      *second = GEN_INT (low);
3544	    }
3545	  else
3546	    {
3547	      *first = GEN_INT (low);
3548	      *second = GEN_INT (high);
3549	    }
3550	}
3551      else
3552	{
3553	  /* The rule for using CONST_INT for a wider mode
3554	     is that we regard the value as signed.
3555	     So sign-extend it.  */
3556	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3557	  if (WORDS_BIG_ENDIAN)
3558	    {
3559	      *first = high;
3560	      *second = value;
3561	    }
3562	  else
3563	    {
3564	      *first = value;
3565	      *second = high;
3566	    }
3567	}
3568    }
3569  else if (GET_CODE (value) != CONST_DOUBLE)
3570    {
3571      if (WORDS_BIG_ENDIAN)
3572	{
3573	  *first = const0_rtx;
3574	  *second = value;
3575	}
3576      else
3577	{
3578	  *first = value;
3579	  *second = const0_rtx;
3580	}
3581    }
3582  else if (GET_MODE (value) == VOIDmode
3583	   /* This is the old way we did CONST_DOUBLE integers.  */
3584	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3585    {
3586      /* In an integer, the words are defined as most and least significant.
3587	 So order them by the target's convention.  */
3588      if (WORDS_BIG_ENDIAN)
3589	{
3590	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3591	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
3592	}
3593      else
3594	{
3595	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
3596	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3597	}
3598    }
3599  else
3600    {
3601      REAL_VALUE_TYPE r;
3602      long l[2];
3603      REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3604
3605      /* Note, this converts the REAL_VALUE_TYPE to the target's
3606	 format, splits up the floating point double and outputs
3607	 exactly 32 bits of it into each of l[0] and l[1] --
3608	 not necessarily BITS_PER_WORD bits.  */
3609      REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3610
3611      /* If 32 bits is an entire word for the target, but not for the host,
3612	 then sign-extend on the host so that the number will look the same
3613	 way on the host that it would on the target.  See for instance
3614	 simplify_unary_operation.  The #if is needed to avoid compiler
3615	 warnings.  */
3616
3617#if HOST_BITS_PER_LONG > 32
3618      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3619	{
3620	  if (l[0] & ((long) 1 << 31))
3621	    l[0] |= ((long) (-1) << 32);
3622	  if (l[1] & ((long) 1 << 31))
3623	    l[1] |= ((long) (-1) << 32);
3624	}
3625#endif
3626
3627      *first = GEN_INT (l[0]);
3628      *second = GEN_INT (l[1]);
3629    }
3630}
3631
3632/* Return nonzero if this function has no function calls.  */
3633
3634int
3635leaf_function_p (void)
3636{
3637  rtx insn;
3638  rtx link;
3639
3640  if (current_function_profile || profile_arc_flag)
3641    return 0;
3642
3643  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3644    {
3645      if (CALL_P (insn)
3646	  && ! SIBLING_CALL_P (insn))
3647	return 0;
3648      if (NONJUMP_INSN_P (insn)
3649	  && GET_CODE (PATTERN (insn)) == SEQUENCE
3650	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3651	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3652	return 0;
3653    }
3654  for (link = current_function_epilogue_delay_list;
3655       link;
3656       link = XEXP (link, 1))
3657    {
3658      insn = XEXP (link, 0);
3659
3660      if (CALL_P (insn)
3661	  && ! SIBLING_CALL_P (insn))
3662	return 0;
3663      if (NONJUMP_INSN_P (insn)
3664	  && GET_CODE (PATTERN (insn)) == SEQUENCE
3665	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3666	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3667	return 0;
3668    }
3669
3670  return 1;
3671}
3672
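/* A hedged usage sketch, not part of final.c's interface: target code such
   as a prologue expander might consult leaf_function_p to decide whether the
   expensive frame or register-window setup can be skipped.  The function
   below is hypothetical.  */
#if 0
static void
example_expand_prologue (void)
{
  if (optimize > 0 && leaf_function_p ())
    {
      /* Emit the cheap, frameless entry sequence.  */
    }
  else
    {
      /* Emit the full entry sequence, including the frame or window.  */
    }
}
#endif
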
3673	/* Return 1 if branch is a forward branch.
3674	   Uses the insn_shuid array, so it works only in the final pass.  May be
3675	   used by output templates to add branch prediction hints.  */
3677int
3678final_forward_branch_p (rtx insn)
3679{
3680  int insn_id, label_id;
3681
3682  gcc_assert (uid_shuid);
3683  insn_id = INSN_SHUID (insn);
3684  label_id = INSN_SHUID (JUMP_LABEL (insn));
3685	  /* We've hit some insns that do not have id information available.  */
3686  gcc_assert (insn_id && label_id);
3687  return insn_id < label_id;
3688}
3689
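/* A hedged sketch of how an output template might use the predicate above
   to add a static branch-prediction hint; the mnemonics, operand codes and
   function name are hypothetical.  */
#if 0
static const char *
example_branch_template (rtx insn)
{
  /* Forward conditional branches are often predicted not taken, so emit
     a "predict not taken" hint for them and "predict taken" otherwise.  */
  if (final_forward_branch_p (insn))
    return "br%C0,pn\t%l1";
  return "br%C0,pt\t%l1";
}
#endif
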
3690/* On some machines, a function with no call insns
3691   can run faster if it doesn't create its own register window.
3692   When output, the leaf function should use only the "output"
3693   registers.  Ordinarily, the function would be compiled to use
3694   the "input" registers to find its arguments; it is a candidate
3695   for leaf treatment if it uses only the "input" registers.
3696   Leaf function treatment means renumbering so the function
3697   uses the "output" registers instead.  */
3698
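/* A hedged sketch of the target macros the code below relies on, for a
   hypothetical machine whose "input" registers 24..31 map onto "output"
   registers 8..15; real definitions (SPARC's, for instance) live in the
   target headers, not here.  */
#if 0
/* Nonzero entries mark the hard registers a leaf function may use.  */
static const char example_leaf_regs[FIRST_PSEUDO_REGISTER]
  = { /* per-register flags omitted in this sketch */ 0 };
#define LEAF_REGISTERS example_leaf_regs

/* Remap an "input" register onto its "output" counterpart and leave
   every other register alone.  */
#define LEAF_REG_REMAP(REGNO) \
  ((REGNO) >= 24 && (REGNO) <= 31 ? (REGNO) - 16 : (REGNO))
#endif
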
3699#ifdef LEAF_REGISTERS
3700
3701/* Return 1 if this function uses only the registers that can be
3702   safely renumbered.  */
3703
3704int
3705only_leaf_regs_used (void)
3706{
3707  int i;
3708  const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3709
3710  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3711    if ((regs_ever_live[i] || global_regs[i])
3712	&& ! permitted_reg_in_leaf_functions[i])
3713      return 0;
3714
3715  if (current_function_uses_pic_offset_table
3716      && pic_offset_table_rtx != 0
3717      && REG_P (pic_offset_table_rtx)
3718      && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3719    return 0;
3720
3721  return 1;
3722}
3723
3724/* Scan all instructions and renumber all registers into those
3725   available in leaf functions.  */
3726
3727static void
3728leaf_renumber_regs (rtx first)
3729{
3730  rtx insn;
3731
3732	  /* Renumber only the actual patterns.
3733	     The reg-notes can contain frame pointer refs, and renumbering
3734	     those could crash; it should not be needed anyway.  */
3735  for (insn = first; insn; insn = NEXT_INSN (insn))
3736    if (INSN_P (insn))
3737      leaf_renumber_regs_insn (PATTERN (insn));
3738  for (insn = current_function_epilogue_delay_list;
3739       insn;
3740       insn = XEXP (insn, 1))
3741    if (INSN_P (XEXP (insn, 0)))
3742      leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3743}
3744
3745/* Scan IN_RTX and its subexpressions, and renumber all regs into those
3746   available in leaf functions.  */
3747
3748void
3749leaf_renumber_regs_insn (rtx in_rtx)
3750{
3751  int i, j;
3752  const char *format_ptr;
3753
3754  if (in_rtx == 0)
3755    return;
3756
3757	  /* Renumber all "input" registers into "output" registers.
3758	     A register's `used' flag is set once it has been renumbered,
3759	     so the same register is never remapped twice.  */
3760
3761  if (REG_P (in_rtx))
3762    {
3763      int newreg;
3764
3765      /* Don't renumber the same reg twice.  */
3766      if (in_rtx->used)
3767	return;
3768
3769      newreg = REGNO (in_rtx);
3770      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
3771	 to reach here as part of a REG_NOTE.  */
3772      if (newreg >= FIRST_PSEUDO_REGISTER)
3773	{
3774	  in_rtx->used = 1;
3775	  return;
3776	}
3777      newreg = LEAF_REG_REMAP (newreg);
3778      gcc_assert (newreg >= 0);
3779      regs_ever_live[REGNO (in_rtx)] = 0;
3780      regs_ever_live[newreg] = 1;
3781      REGNO (in_rtx) = newreg;
3782      in_rtx->used = 1;
3783    }
3784
3785  if (INSN_P (in_rtx))
3786    {
3787      /* Inside a SEQUENCE, we find insns.
3788	 Renumber just the patterns of these insns,
3789	 just as we do for the top-level insns.  */
3790      leaf_renumber_regs_insn (PATTERN (in_rtx));
3791      return;
3792    }
3793
3794  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3795
3796  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3797    switch (*format_ptr++)
3798      {
3799      case 'e':
3800	leaf_renumber_regs_insn (XEXP (in_rtx, i));
3801	break;
3802
3803      case 'E':
3804	if (NULL != XVEC (in_rtx, i))
3805	  {
3806	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
3807	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3808	  }
3809	break;
3810
3811      case 'S':
3812      case 's':
3813      case '0':
3814      case 'i':
3815      case 'w':
3816      case 'n':
3817      case 'u':
3818	break;
3819
3820      default:
3821	gcc_unreachable ();
3822      }
3823}
3824#endif
3825
3826
3827	/* When -gused is used, emit debug info only for used symbols.  In
3828	   addition to the standard intercepted debug_hooks there are some direct
3829	   calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3830	   Those routines may also be called from a higher-level intercepted routine.
3831	   So, to avoid recording data for an inner call to one of these while an
3832	   intercept is already active, we maintain a nesting counter (debug_nesting)
3833	   and only save the intercepted arguments when the nesting level is 1.  */
3834int debug_nesting = 0;
3835
3836static tree *symbol_queue;
3837int symbol_queue_index = 0;
3838static int symbol_queue_size = 0;
3839
3840/* Generate the symbols for any queued up type symbols we encountered
3841   while generating the type info for some originally used symbol.
3842   This might generate additional entries in the queue.  Only when
3843   the nesting depth goes to 0 is this routine called.  */
3844
3845void
3846debug_flush_symbol_queue (void)
3847{
3848  int i;
3849
3850  /* Make sure that additionally queued items are not flushed
3851     prematurely.  */
3852
3853  ++debug_nesting;
3854
3855  for (i = 0; i < symbol_queue_index; ++i)
3856    {
3857      /* If we pushed queued symbols then such symbols must be
3858         output no matter what anyone else says.  Specifically,
3859         we need to make sure dbxout_symbol() thinks the symbol was
3860         used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3861         which may be set for outside reasons.  */
3862      int saved_tree_used = TREE_USED (symbol_queue[i]);
3863      int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3864      TREE_USED (symbol_queue[i]) = 1;
3865      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3866
3867#ifdef DBX_DEBUGGING_INFO
3868      dbxout_symbol (symbol_queue[i], 0);
3869#endif
3870
3871      TREE_USED (symbol_queue[i]) = saved_tree_used;
3872      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3873    }
3874
3875  symbol_queue_index = 0;
3876  --debug_nesting;
3877}
3878
3879/* Queue a type symbol needed as part of the definition of a decl
3880   symbol.  These symbols are generated when debug_flush_symbol_queue()
3881   is called.  */
3882
3883void
3884debug_queue_symbol (tree decl)
3885{
3886  if (symbol_queue_index >= symbol_queue_size)
3887    {
3888      symbol_queue_size += 10;
3889      symbol_queue = xrealloc (symbol_queue,
3890			       symbol_queue_size * sizeof (tree));
3891    }
3892
3893  symbol_queue[symbol_queue_index++] = decl;
3894}
3895
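/* A hedged sketch of the intended calling pattern for the machinery above,
   as it might look inside an intercepted dbxout entry point; the function
   and its TYPE_DECL argument are hypothetical.  */
#if 0
static void
example_intercepted_hook (tree needed_type_decl)
{
  ++debug_nesting;

  /* Only the outermost intercepted call records anything; nested calls
     must not queue or flush.  */
  if (debug_nesting == 1)
    debug_queue_symbol (needed_type_decl);

  --debug_nesting;

  /* Once every nested intercept has returned, emit the queued types.  */
  if (debug_nesting == 0 && symbol_queue_index > 0)
    debug_flush_symbol_queue ();
}
#endif
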
3896/* Free symbol queue.  */
3897void
3898debug_free_queue (void)
3899{
3900  if (symbol_queue)
3901    {
3902      free (symbol_queue);
3903      symbol_queue = NULL;
3904      symbol_queue_size = 0;
3905    }
3906}
3907
3908/* Turn the RTL into assembly.  */
3909static unsigned int
3910rest_of_handle_final (void)
3911{
3912  rtx x;
3913  const char *fnname;
3914
3915  /* Get the function's name, as described by its RTL.  This may be
3916     different from the DECL_NAME name used in the source file.  */
3917
3918  x = DECL_RTL (current_function_decl);
3919  gcc_assert (MEM_P (x));
3920  x = XEXP (x, 0);
3921  gcc_assert (GET_CODE (x) == SYMBOL_REF);
3922  fnname = XSTR (x, 0);
3923
3924  assemble_start_function (current_function_decl, fnname);
3925  final_start_function (get_insns (), asm_out_file, optimize);
3926  final (get_insns (), asm_out_file, optimize);
3927  final_end_function ();
3928
3929#ifdef TARGET_UNWIND_INFO
3930  /* ??? The IA-64 ".handlerdata" directive must be issued before
3931     the ".endp" directive that closes the procedure descriptor.  */
3932  output_function_exception_table ();
3933#endif
3934
3935  assemble_end_function (current_function_decl, fnname);
3936
3937#ifndef TARGET_UNWIND_INFO
3938  /* Otherwise, it feels unclean to switch sections in the middle.  */
3939  output_function_exception_table ();
3940#endif
3941
3942  user_defined_section_attribute = false;
3943
3944  if (! quiet_flag)
3945    fflush (asm_out_file);
3946
3947  /* Release all memory allocated by flow.  */
3948  free_basic_block_vars ();
3949
3950  /* Write DBX symbols if requested.  */
3951
3952  /* Note that for those inline functions where we don't initially
3953     know for certain that we will be generating an out-of-line copy,
3954     the first invocation of this routine (rest_of_compilation) will
3955     skip over this code by doing a `goto exit_rest_of_compilation;'.
3956     Later on, wrapup_global_declarations will (indirectly) call
3957     rest_of_compilation again for those inline functions that need
3958     to have out-of-line copies generated.  During that call, we
3959     *will* be routed past here.  */
3960
3961  timevar_push (TV_SYMOUT);
3962  (*debug_hooks->function_decl) (current_function_decl);
3963  timevar_pop (TV_SYMOUT);
3964  return 0;
3965}
3966
3967struct tree_opt_pass pass_final =
3968{
3969  NULL,                                 /* name */
3970  NULL,                                 /* gate */
3971  rest_of_handle_final,                 /* execute */
3972  NULL,                                 /* sub */
3973  NULL,                                 /* next */
3974  0,                                    /* static_pass_number */
3975  TV_FINAL,                             /* tv_id */
3976  0,                                    /* properties_required */
3977  0,                                    /* properties_provided */
3978  0,                                    /* properties_destroyed */
3979  0,                                    /* todo_flags_start */
3980  TODO_ggc_collect,                     /* todo_flags_finish */
3981  0                                     /* letter */
3982};
3983
3984
3985static unsigned int
3986rest_of_handle_shorten_branches (void)
3987{
3988  /* Shorten branches.  */
3989  shorten_branches (get_insns ());
3990  return 0;
3991}
3992
3993struct tree_opt_pass pass_shorten_branches =
3994{
3995  "shorten",                            /* name */
3996  NULL,                                 /* gate */
3997  rest_of_handle_shorten_branches,      /* execute */
3998  NULL,                                 /* sub */
3999  NULL,                                 /* next */
4000  0,                                    /* static_pass_number */
4001  TV_FINAL,                             /* tv_id */
4002  0,                                    /* properties_required */
4003  0,                                    /* properties_provided */
4004  0,                                    /* properties_destroyed */
4005  0,                                    /* todo_flags_start */
4006  TODO_dump_func,                       /* todo_flags_finish */
4007  0                                     /* letter */
4008};
4009
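/* A hedged sketch of what another pass description in this style could look
   like; the pass, its gate and its name are hypothetical, but the field
   layout mirrors pass_shorten_branches above.  */
#if 0
static bool
gate_example (void)
{
  return optimize > 0;
}

struct tree_opt_pass pass_example =
{
  "example",                            /* name */
  gate_example,                         /* gate */
  rest_of_handle_shorten_branches,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
#endif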
4010
4011static unsigned int
4012rest_of_clean_state (void)
4013{
4014  rtx insn, next;
4015
4016	  /* It is very important to decompose the RTL instruction chain here:
4017	     debug information keeps pointing into CODE_LABEL insns inside the function
4018	     body.  If these remain pointing to the other insns, we end up preserving
4019	     the whole RTL chain and the attached detailed debug info in memory.  */
4020  for (insn = get_insns (); insn; insn = next)
4021    {
4022      next = NEXT_INSN (insn);
4023      NEXT_INSN (insn) = NULL;
4024      PREV_INSN (insn) = NULL;
4025    }
4026
4027  /* In case the function was not output,
4028     don't leave any temporary anonymous types
4029     queued up for sdb output.  */
4030#ifdef SDB_DEBUGGING_INFO
4031  if (write_symbols == SDB_DEBUG)
4032    sdbout_types (NULL_TREE);
4033#endif
4034
4035  reload_completed = 0;
4036  epilogue_completed = 0;
4037  flow2_completed = 0;
4038  no_new_pseudos = 0;
4039#ifdef STACK_REGS
4040  regstack_completed = 0;
4041#endif
4042
4043  /* Clear out the insn_length contents now that they are no
4044     longer valid.  */
4045  init_insn_lengths ();
4046
4047  /* Show no temporary slots allocated.  */
4048  init_temp_slots ();
4049
4050  free_basic_block_vars ();
4051  free_bb_for_insn ();
4052
4053
4054  if (targetm.binds_local_p (current_function_decl))
4055    {
4056      int pref = cfun->preferred_stack_boundary;
4057      if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
4058        pref = cfun->stack_alignment_needed;
4059      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4060        = pref;
4061    }
4062
4063  /* Make sure volatile mem refs aren't considered valid operands for
4064     arithmetic insns.  We must call this here if this is a nested inline
4065     function, since the above code leaves us in the init_recog state,
4066     and the function context push/pop code does not save/restore volatile_ok.
4067
4068     ??? Maybe it isn't necessary for expand_start_function to call this
4069     anymore if we do it here?  */
4070
4071  init_recog_no_volatile ();
4072
4073  /* We're done with this function.  Free up memory if we can.  */
4074  free_after_parsing (cfun);
4075  free_after_compilation (cfun);
4076  return 0;
4077}
4078
4079struct tree_opt_pass pass_clean_state =
4080{
4081  NULL,                                 /* name */
4082  NULL,                                 /* gate */
4083  rest_of_clean_state,                  /* execute */
4084  NULL,                                 /* sub */
4085  NULL,                                 /* next */
4086  0,                                    /* static_pass_number */
4087  TV_FINAL,                             /* tv_id */
4088  0,                                    /* properties_required */
4089  0,                                    /* properties_provided */
4090  PROP_rtl,                             /* properties_destroyed */
4091  0,                                    /* todo_flags_start */
4092  0,                                    /* todo_flags_finish */
4093  0                                     /* letter */
4094};
4095