reorg.c revision 50397
/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
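
   For example (a hypothetical MIPS-style fragment; the registers and
   opcodes are purely illustrative), an insn from before the branch
   that does not feed the branch condition can be moved into the slot:

	addu  $6,$6,4		beq   $2,$0,L1
	beq   $2,$0,L1	   ->	addu  $6,$6,4	# fills the delay slot
	nop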

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that, failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.
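
   For example (hypothetical SPARC-style code, illustrative only), an
   annulling branch executes its slot insn only when taken, so the
   first insn of the target can be copied into the slot and the branch
   redirected past the original:

	be    L1		be,a  L1'
	nop			mov   1,%o2	! copy of the insn at L1
	...			...
     L1:		     L1:
	mov   1,%o2		mov   1,%o2
	...		     L1':
				...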

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where
   a conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
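
   For example (an illustrative sketch, not machine-specific), one such
   rule threads a jump whose target is itself an unconditional jump:

	bne   L1		bne   L2
	...	       ->	...
     L1: b     L2	     L1: b     L2	! now possibly dead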

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insns that set and use CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).
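
   Sketch of those cross-links (shapes simplified; see the use of
   find_reg_note with REG_CC_SETTER in delete_scheduled_jump below):
   the insn that sets CC0 carries a REG_CC_USER note whose
   XEXP (note, 0) is the insn using CC0, and the user carries a
   REG_CC_SETTER note pointing back at the setter.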

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".   */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "expr.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "insn-flags.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"

/* Import list of registers used as spill regs from reload.  */
extern HARD_REG_SET used_spill_regs;

/* Import highest label used in function at end of reload.  */
extern int max_label_num_after_reload;


#ifdef DELAY_SLOTS

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
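
/* Usage sketch, drawn from the uses later in this file (not a new
   API): an insn that still needs its slots filled is recorded with

	obstack_ptr_grow (&unfilled_slots_obstack, insn);

   and the pending insns are then walked as a plain array:

	rtx *p;

	for (p = unfilled_slots_base; p < unfilled_slots_next; p++)
	  ... *p is the next insn needing delay slots ...  */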

/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;			/* Insn sets or needs a memory location.  */
  char unch_memory;		/* Insn sets or needs an "unchanging" MEM.  */
  char volatil;			/* Insn sets or needs a volatile memory loc.  */
  char cc;			/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;		/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
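
/* Example use (a sketch mirroring insn_references_resource_p below),
   to compute everything INSN references:

	struct resources needed;

	CLEAR_RESOURCE (&needed);
	mark_referenced_resources (insn, &needed, 1);  */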

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */
static struct target_info **target_hash_table;
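
/* Lookup sketch (the same idiom appears in add_to_delay_list below);
   the entry for INSN, if any, hangs off the bucket chosen by its uid:

	struct target_info *tinfo;

	for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	     tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (insn))
	    break;  */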

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources	PROTO((rtx, struct resources *, int, int));
static int stop_search_p	PROTO((rtx, int));
static int resource_conflicts_p	PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
static rtx find_end_label	PROTO((void));
static rtx emit_delay_sequence	PROTO((rtx, rtx, int));
static rtx add_to_delay_list	PROTO((rtx, rtx));
static void delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip	PROTO((rtx));
static int get_jump_flags PROTO((rtx, rtx));
static int rare_destination PROTO((rtx));
static int mostly_true_jump	PROTO((rtx, rtx));
static rtx get_branch_condition	PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn	PROTO((rtx, rtx, rtx));
static int own_thread_p		PROTO((rtx, rtx, int));
static int find_basic_block	PROTO((rtx));
static void update_block	PROTO((rtx, rtx));
static int reorg_redirect_jump PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void fix_reg_dead_note PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
static void update_live_status	PROTO((rtx, rtx));
static rtx next_insn_no_annul	PROTO((rtx));
static rtx find_dead_or_set_registers PROTO ((rtx, struct resources *, rtx *,
					      int, struct resources,
					      struct resources));
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int *, rtx));
static void fill_eager_delay_slots PROTO((void));
static void relax_delay_slots	PROTO((rtx));
static void make_return_insns	PROTO((rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

static void
mark_referenced_resources (x, res, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     register int include_delayed_effects;
{
  register enum rtx_code code = GET_CODE (x);
  register int i, j;
  register char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);
	}
      return;

    case REG:
      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil = MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  rtx next = NEXT_INSN (x);
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      next = NEXT_INSN (NEXT_INSN (insn));
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns non-zero.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  SET_DEST (XEXP (link, 0))))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (SET_DEST (XEXP (link, 0)),
					       res, 0);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}

/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn. If
   INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.   */

static void
mark_set_resources (x, res, in_dest, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     int in_dest;
     int include_delayed_effects;
{
  register enum rtx_code code;
  register int i, j;
  register char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (include_delayed_effects)
	{
	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);
	  rtx link;

	  res->cc = res->memory = 1;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] || global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (include_delayed_effects
			   || GET_CODE (SET_SRC (x)) != CALL),
			  0);

      mark_set_resources (SET_SRC (x), res, 0, 0);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0,
			      include_delayed_effects);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, 0);
      mark_set_resources (XEXP (x, 1), res, 0, 0);
      mark_set_resources (XEXP (x, 2), res, 0, 0);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory = RTX_UNCHANGING_P (x);
	  res->volatil = MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, 0);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res,
				in_dest, include_delayed_effects);
	  else
	    {
	      int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	      int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      for (i = regno; i < last_regno; i++)
		SET_HARD_REG_BIT (res->regs, i);
	    }
	}
      return;

    case REG:
      if (in_dest)
        for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	  SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest,
			      include_delayed_effects);
	break;
      }
}

/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}

/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.   CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}

/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
          if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
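
/* Illustration of the links established below (a sketch, not extra
   semantics), for S = (sequence [J D1 D2]) spliced in between insns
   A and B:

	A <-> S <-> B			outer chain
	A <-  J <-> D1 <-> D2  -> B	insns inside the SEQUENCE

   so PREV_INSN (NEXT_INSN (insn)) == insn fails exactly at D2.  */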

static rtx
emit_delay_sequence (insn, list, length)
     rtx insn;
     rtx list;
     int length;
{
  register int i = 1;
  register rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      struct target_info *tinfo;

      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;

      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}

/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */

static void
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL; by the chaining done in
     emit_delay_sequence, that condition first holds at the last insn
     of the containing SEQUENCE.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}


/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
114318334Speter       invert the test with a branch that annuls when the condition is
114418334Speter       false.  This saves insns, since otherwise we must copy an insn
114518334Speter       from the L1 target.
114618334Speter
114718334Speter        (orig)		 (skip)		(otherwise)
114818334Speter	Bcc.n L1	Bcc',a L1	Bcc,a L1'
114918334Speter	insn		insn		insn2
115018334Speter      L1:	      L1:	      L1:
115118334Speter	insn2		insn2		insn2
115218334Speter	insn3		insn3	      L1':
115318334Speter					insn3
115418334Speter
115518334Speter   2.  When a conditional branch skips over only one instruction,
115618334Speter       and after that, it unconditionally branches somewhere else,
115718334Speter       perform the similar optimization. This saves executing the
115818334Speter       second branch in the case where the inverted condition is true.
115918334Speter
116018334Speter	Bcc.n L1	Bcc',a L2
116118334Speter	insn		insn
116218334Speter      L1:	      L1:
116318334Speter	Bra L2		Bra L2
116418334Speter
116518334Speter   INSN is a JUMP_INSN.
116618334Speter
116718334Speter   This should be expanded to skip over N insns, where N is the number
116818334Speter   of delay slots required.  */
116918334Speter
117018334Speterstatic rtx
117118334Speteroptimize_skip (insn)
117218334Speter     register rtx insn;
117318334Speter{
117418334Speter  register rtx trial = next_nonnote_insn (insn);
117518334Speter  rtx next_trial = next_active_insn (trial);
117618334Speter  rtx delay_list = 0;
117718334Speter  rtx target_label;
117818334Speter  int flags;
117918334Speter
118018334Speter  flags = get_jump_flags (insn, JUMP_LABEL (insn));
118118334Speter
118218334Speter  if (trial == 0
118318334Speter      || GET_CODE (trial) != INSN
118418334Speter      || GET_CODE (PATTERN (trial)) == SEQUENCE
118518334Speter      || recog_memoized (trial) < 0
118618334Speter      || (! eligible_for_annul_false (insn, 0, trial, flags)
118718334Speter	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
118818334Speter    return 0;
118918334Speter
119018334Speter  /* There are two cases where we are just executing one insn (we assume
119118334Speter     here that a branch requires only one insn; this should be generalized
119218334Speter     at some point):  Where the branch goes around a single insn or where
119318334Speter     we have one insn followed by a branch to the same label we branch to.
119418334Speter     In both of these cases, inverting the jump and annulling the delay
119518334Speter     slot give the same effect in fewer insns.  */
119618334Speter  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
119718334Speter      || (next_trial != 0
119818334Speter	  && GET_CODE (next_trial) == JUMP_INSN
119918334Speter	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
120018334Speter	  && (simplejump_p (next_trial)
120118334Speter	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
120218334Speter    {
120318334Speter      if (eligible_for_annul_false (insn, 0, trial, flags))
120418334Speter	{
120518334Speter	  if (invert_jump (insn, JUMP_LABEL (insn)))
120618334Speter	    INSN_FROM_TARGET_P (trial) = 1;
120718334Speter	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
120818334Speter	    return 0;
120918334Speter	}
121018334Speter
121118334Speter      delay_list = add_to_delay_list (trial, NULL_RTX);
121218334Speter      next_trial = next_active_insn (trial);
121318334Speter      update_block (trial, trial);
121418334Speter      delete_insn (trial);
121518334Speter
121618334Speter      /* Also, if we are targeting an unconditional
121718334Speter	 branch, thread our jump to the target of that branch.  Don't
121818334Speter	 change this into a RETURN here, because it may not accept what
121918334Speter	 we have in the delay slot.  We'll fix this up later.  */
122018334Speter      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
122118334Speter	  && (simplejump_p (next_trial)
122218334Speter	      || GET_CODE (PATTERN (next_trial)) == RETURN))
122318334Speter	{
122418334Speter	  target_label = JUMP_LABEL (next_trial);
122518334Speter	  if (target_label == 0)
122618334Speter	    target_label = find_end_label ();
122718334Speter
122818334Speter	  /* Recompute the flags based on TARGET_LABEL since threading
122918334Speter	     the jump to TARGET_LABEL may change the direction of the
123018334Speter	     jump (which may change the circumstances in which the
123118334Speter	     delay slot is nullified).  */
123218334Speter	  flags = get_jump_flags (insn, target_label);
123318334Speter	  if (eligible_for_annul_true (insn, 0, trial, flags))
123418334Speter	    reorg_redirect_jump (insn, target_label);
123518334Speter	}
123618334Speter
123718334Speter      INSN_ANNULLED_BRANCH_P (insn) = 1;
123818334Speter    }
123918334Speter
124018334Speter  return delay_list;
124118334Speter}
124218334Speter#endif
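
/* Illustrative sketch, not part of the pass: the delay list returned by
   optimize_skip (and built by the other filling routines below) is an
   INSN_LIST chain.  A hypothetical helper -- the name count_delay_list
   is invented for this example -- could count the slots filled so far
   like this:

	static int
	count_delay_list (list)
	     rtx list;
	{
	  int n = 0;
	  for (; list; list = XEXP (list, 1))
	    n++;
	  return n;
	}
   */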
124318334Speter
124418334Speter
124518334Speter/*  Encode and return branch direction and prediction information for
124618334Speter    INSN assuming it will jump to LABEL.
124718334Speter
124818334Speter    Unconditional branches return no direction information and
124918334Speter    are predicted as very likely taken.  */
125050397Sobrien
125118334Speterstatic int
125218334Speterget_jump_flags (insn, label)
125318334Speter     rtx insn, label;
125418334Speter{
125518334Speter  int flags;
125618334Speter
125718334Speter  /* get_jump_flags can be passed any insn with delay slots, these may
125818334Speter     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
125918334Speter     direction information, and only if they are conditional jumps.
126018334Speter
126118334Speter     If LABEL is zero, then there is no way to determine the branch
126218334Speter     direction.  */
126318334Speter  if (GET_CODE (insn) == JUMP_INSN
126418334Speter      && (condjump_p (insn) || condjump_in_parallel_p (insn))
126518334Speter      && INSN_UID (insn) <= max_uid
126618334Speter      && label != 0
126718334Speter      && INSN_UID (label) <= max_uid)
126818334Speter    flags
126918334Speter      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
127018334Speter	 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
127118334Speter  /* No valid direction information.  */
127218334Speter  else
127318334Speter    flags = 0;
127418334Speter
127518334Speter  /* If INSN is a conditional branch, call mostly_true_jump to
127618334Speter     determine the branch prediction.
127718334Speter
127818334Speter     Unconditional branches are predicted as very likely taken.  */
127918334Speter  if (GET_CODE (insn) == JUMP_INSN
128018334Speter      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
128118334Speter    {
128218334Speter      int prediction;
128318334Speter
128418334Speter      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
128518334Speter      switch (prediction)
128618334Speter	{
128718334Speter	  case 2:
128818334Speter	    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
128918334Speter	    break;
129018334Speter	  case 1:
129118334Speter	    flags |= ATTR_FLAG_likely;
129218334Speter	    break;
129318334Speter	  case 0:
129418334Speter	    flags |= ATTR_FLAG_unlikely;
129518334Speter	    break;
129618334Speter	  case -1:
129718334Speter	    flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
129818334Speter	    break;
129918334Speter
130018334Speter	  default:
130118334Speter	    abort ();
130218334Speter	}
130318334Speter    }
130418334Speter  else
130518334Speter    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
130618334Speter
130718334Speter  return flags;
130818334Speter}
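
/* Illustrative example, not from any particular target: for a
   conditional jump to a label that appears earlier in the insn stream
   (the label's uid_to_ruid is smaller), rated 2 by mostly_true_jump,
   the value returned above is
   ATTR_FLAG_backward | ATTR_FLAG_very_likely | ATTR_FLAG_likely.
   An unconditional jump gets no direction bit, only
   ATTR_FLAG_very_likely | ATTR_FLAG_likely.  */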
130918334Speter
131018334Speter/* Return 1 if INSN is a destination that will be branched to rarely (the
131118334Speter   return point of a function); return 2 if INSN will be branched to very
131218334Speter   rarely (a call to a function that doesn't return).  Otherwise,
131318334Speter   return 0.  */
131418334Speter
131518334Speterstatic int
131618334Speterrare_destination (insn)
131718334Speter     rtx insn;
131818334Speter{
131918334Speter  int jump_count = 0;
132018334Speter  rtx next;
132118334Speter
132218334Speter  for (; insn; insn = next)
132318334Speter    {
132418334Speter      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
132518334Speter	insn = XVECEXP (PATTERN (insn), 0, 0);
132618334Speter
132718334Speter      next = NEXT_INSN (insn);
132818334Speter
132918334Speter      switch (GET_CODE (insn))
133018334Speter	{
133118334Speter	case CODE_LABEL:
133218334Speter	  return 0;
133318334Speter	case BARRIER:
133418334Speter	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
133518334Speter	     don't scan past JUMP_INSNs, so any barrier we find here must
133618334Speter	     have been after a CALL_INSN and hence mean the call doesn't
133718334Speter	     return.  */
133818334Speter	  return 2;
133918334Speter	case JUMP_INSN:
134018334Speter	  if (GET_CODE (PATTERN (insn)) == RETURN)
134118334Speter	    return 1;
134218334Speter	  else if (simplejump_p (insn)
134318334Speter		   && jump_count++ < 10)
134418334Speter	    next = JUMP_LABEL (insn);
134518334Speter	  else
134618334Speter	    return 0;
134750397Sobrien
134850397Sobrien	default:
134950397Sobrien	  break;
135018334Speter	}
135118334Speter    }
135218334Speter
135318334Speter  /* If we got here it means we hit the end of the function.  So this
135418334Speter     is an unlikely destination.  */
135518334Speter
135618334Speter  return 1;
135718334Speter}
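
/* Worked example for the code above: when called on the insns following
   a conditional jump (the fallthrough case), a CALL_INSN to a function
   that never returns falls through the default case, the BARRIER after
   it is then seen, and 2 is returned.  Passing a CODE_LABEL directly
   returns 0 at once, since a label means other code can reach that
   point.  */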
135818334Speter
135918334Speter/* Return truth value of the statement that this branch
136018334Speter   is mostly taken.  If we think that the branch is extremely likely
136118334Speter   to be taken, we return 2.  If the branch is slightly more likely to be
136218334Speter   taken, return 1.  If the branch is slightly less likely to be taken,
136318334Speter   return 0 and if the branch is highly unlikely to be taken, return -1.
136418334Speter
136518334Speter   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */
136618334Speter
136718334Speterstatic int
136818334Spetermostly_true_jump (jump_insn, condition)
136918334Speter     rtx jump_insn, condition;
137018334Speter{
137118334Speter  rtx target_label = JUMP_LABEL (jump_insn);
137218334Speter  rtx insn;
137318334Speter  int rare_dest = rare_destination (target_label);
137418334Speter  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
137518334Speter
137650397Sobrien  /* If branch probabilities are available, then use that number since it
137750397Sobrien     always gives a correct answer.  */
137850397Sobrien  if (flag_branch_probabilities)
137950397Sobrien    {
138050397Sobrien      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
138150397Sobrien      if (note)
138250397Sobrien	{
138350397Sobrien	  int prob = XINT (note, 0);
138450397Sobrien
138550397Sobrien	  if (prob >= REG_BR_PROB_BASE * 9 / 10)
138650397Sobrien	    return 2;
138750397Sobrien	  else if (prob >= REG_BR_PROB_BASE / 2)
138850397Sobrien	    return 1;
138950397Sobrien	  else if (prob >= REG_BR_PROB_BASE / 10)
139050397Sobrien	    return 0;
139150397Sobrien	  else
139250397Sobrien	    return -1;
139350397Sobrien	}
139450397Sobrien    }
139550397Sobrien
139618334Speter  /* If this is a branch outside a loop, it is highly unlikely.  */
139718334Speter  if (GET_CODE (PATTERN (jump_insn)) == SET
139818334Speter      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
139918334Speter      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
140018334Speter	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
140118334Speter	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
140218334Speter	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
140318334Speter    return -1;
140418334Speter
140518334Speter  if (target_label)
140618334Speter    {
140718334Speter      /* If this is the test of a loop, it is very likely true.  We scan
140818334Speter	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
140918334Speter	 before the next real insn, we assume the branch is to the top of
141018334Speter	 the loop.  */
141118334Speter      for (insn = PREV_INSN (target_label);
141218334Speter	   insn && GET_CODE (insn) == NOTE;
141318334Speter	   insn = PREV_INSN (insn))
141418334Speter	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
141518334Speter	  return 2;
141618334Speter
141718334Speter      /* If this is a jump to the test of a loop, it is likely true.  We scan
141818334Speter	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
141918334Speter	 before the next real insn, we assume the branch is to the loop branch
142018334Speter	 test.  */
142118334Speter      for (insn = NEXT_INSN (target_label);
142218334Speter	   insn && GET_CODE (insn) == NOTE;
142318334Speter	   insn = NEXT_INSN (insn))
142418334Speter	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
142518334Speter	  return 1;
142618334Speter    }
142718334Speter
142818334Speter  /* Look at the relative rarities of the fallthrough and destination.  If
142950397Sobrien     they differ, we can predict the branch that way.  */
143018334Speter
143118334Speter  switch (rare_fallthrough - rare_dest)
143218334Speter    {
143318334Speter    case -2:
143418334Speter      return -1;
143518334Speter    case -1:
143618334Speter      return 0;
143718334Speter    case 0:
143818334Speter      break;
143918334Speter    case 1:
144018334Speter      return 1;
144118334Speter    case 2:
144218334Speter      return 2;
144318334Speter    }
144418334Speter
144518334Speter  /* If we couldn't figure out what this jump was, assume it won't be
144618334Speter     taken.  This should be rare.  */
144718334Speter  if (condition == 0)
144818334Speter    return 0;
144918334Speter
145018334Speter  /* EQ tests are usually false and NE tests are usually true.  Also,
145118334Speter     most quantities are positive, so we can make the appropriate guesses
145218334Speter     about signed comparisons against zero.  */
145318334Speter  switch (GET_CODE (condition))
145418334Speter    {
145518334Speter    case CONST_INT:
145618334Speter      /* Unconditional branch.  */
145718334Speter      return 1;
145818334Speter    case EQ:
145918334Speter      return 0;
146018334Speter    case NE:
146118334Speter      return 1;
146218334Speter    case LE:
146318334Speter    case LT:
146418334Speter      if (XEXP (condition, 1) == const0_rtx)
146518334Speter        return 0;
146618334Speter      break;
146718334Speter    case GE:
146818334Speter    case GT:
146918334Speter      if (XEXP (condition, 1) == const0_rtx)
147018334Speter	return 1;
147118334Speter      break;
147250397Sobrien
147350397Sobrien    default:
147450397Sobrien      break;
147518334Speter    }
147618334Speter
147718334Speter  /* Predict that backward branches are usually taken, forward branches usually not.  If
147818334Speter     we don't know whether this is forward or backward, assume the branch
147918334Speter     will be taken, since most are.  */
148018334Speter  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
148118334Speter	  || INSN_UID (target_label) > max_uid
148218334Speter	  || (uid_to_ruid[INSN_UID (jump_insn)]
148318334Speter	      > uid_to_ruid[INSN_UID (target_label)]));
148418334Speter}
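
/* Worked example of the REG_BR_PROB thresholds above, assuming
   REG_BR_PROB_BASE is 10000: a note value of 9500 yields 2 (very
   likely), 6000 yields 1, 2000 yields 0, and 500 yields -1 (very
   unlikely).  */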
148518334Speter
148618334Speter/* Return the condition under which INSN will branch to TARGET.  If TARGET
148718334Speter   is zero, return the condition under which INSN will return.  If INSN is
148818334Speter   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
148918334Speter   type of jump, or it doesn't go to TARGET, return 0.  */
149018334Speter
149118334Speterstatic rtx
149218334Speterget_branch_condition (insn, target)
149318334Speter     rtx insn;
149418334Speter     rtx target;
149518334Speter{
149618334Speter  rtx pat = PATTERN (insn);
149718334Speter  rtx src;
149818334Speter
149918334Speter  if (condjump_in_parallel_p (insn))
150018334Speter    pat = XVECEXP (pat, 0, 0);
150118334Speter
150218334Speter  if (GET_CODE (pat) == RETURN)
150318334Speter    return target == 0 ? const_true_rtx : 0;
150418334Speter
150518334Speter  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
150618334Speter    return 0;
150718334Speter
150818334Speter  src = SET_SRC (pat);
150918334Speter  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
151018334Speter    return const_true_rtx;
151118334Speter
151218334Speter  else if (GET_CODE (src) == IF_THEN_ELSE
151318334Speter	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
151418334Speter	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
151518334Speter		   && XEXP (XEXP (src, 1), 0) == target))
151618334Speter	   && XEXP (src, 2) == pc_rtx)
151718334Speter    return XEXP (src, 0);
151818334Speter
151918334Speter  else if (GET_CODE (src) == IF_THEN_ELSE
152018334Speter	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
152118334Speter	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
152218334Speter		   && XEXP (XEXP (src, 2), 0) == target))
152318334Speter	   && XEXP (src, 1) == pc_rtx)
152450397Sobrien    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
152550397Sobrien			   GET_MODE (XEXP (src, 0)),
152650397Sobrien			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
152718334Speter
152818334Speter  return 0;
152918334Speter}
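
/* Illustrative RTL for the cases above.  Given

	(set (pc) (if_then_else (eq (reg:SI 1) (const_int 0))
				(label_ref L1)
				(pc)))

   get_branch_condition (insn, L1) returns (eq (reg:SI 1) (const_int 0)).
   If the arms are swapped, so that the PC falls through on EQ and the
   label sits in the else arm, the reversed condition
   (ne (reg:SI 1) (const_int 0)) is built and returned instead.  */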
153018334Speter
153118334Speter/* Return non-zero if CONDITION is more strict than the condition of
153218334Speter   INSN, i.e., if INSN will always branch if CONDITION is true.  */
153318334Speter
153418334Speterstatic int
153518334Spetercondition_dominates_p (condition, insn)
153618334Speter     rtx condition;
153718334Speter     rtx insn;
153818334Speter{
153918334Speter  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
154018334Speter  enum rtx_code code = GET_CODE (condition);
154118334Speter  enum rtx_code other_code;
154218334Speter
154318334Speter  if (rtx_equal_p (condition, other_condition)
154418334Speter      || other_condition == const_true_rtx)
154518334Speter    return 1;
154618334Speter
154718334Speter  else if (condition == const_true_rtx || other_condition == 0)
154818334Speter    return 0;
154918334Speter
155018334Speter  other_code = GET_CODE (other_condition);
155118334Speter  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
155218334Speter      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
155318334Speter      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
155418334Speter    return 0;
155518334Speter
155618334Speter  return comparison_dominates_p (code, other_code);
155718334Speter}
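
/* Worked example: with CONDITION (eq (reg 1) (const_int 0)) and an INSN
   that branches on (le (reg 1) (const_int 0)), the operands match and
   comparison_dominates_p (EQ, LE) holds -- whenever the EQ is true, the
   LE is as well -- so 1 is returned.  For unrelated operands, or for a
   pair such as LT versus GT, the answer is 0.  */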
155818334Speter
155918334Speter/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
156018334Speter   any insns already in the delay slot of JUMP.  */
156118334Speter
156218334Speterstatic int
156318334Speterredirect_with_delay_slots_safe_p (jump, newlabel, seq)
156418334Speter     rtx jump, newlabel, seq;
156518334Speter{
156650397Sobrien  int flags, i;
156718334Speter  rtx pat = PATTERN (seq);
156818334Speter
156918334Speter  /* Make sure all the delay slots of this jump would still
157018334Speter     be valid after threading the jump.  If they are still
157118334Speter     valid, then return non-zero.  */
157218334Speter
157318334Speter  flags = get_jump_flags (jump, newlabel);
157418334Speter  for (i = 1; i < XVECLEN (pat, 0); i++)
157518334Speter    if (! (
157618334Speter#ifdef ANNUL_IFFALSE_SLOTS
157718334Speter	   (INSN_ANNULLED_BRANCH_P (jump)
157818334Speter	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
157918334Speter	   ? eligible_for_annul_false (jump, i - 1,
158018334Speter				       XVECEXP (pat, 0, i), flags) :
158118334Speter#endif
158218334Speter#ifdef ANNUL_IFTRUE_SLOTS
158318334Speter	   (INSN_ANNULLED_BRANCH_P (jump)
158418334Speter	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
158518334Speter	   ? eligible_for_annul_true (jump, i - 1,
158618334Speter				      XVECEXP (pat, 0, i), flags) :
158718334Speter#endif
158818334Speter	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
158918334Speter      break;
159018334Speter
159118334Speter  return (i == XVECLEN (pat, 0));
159218334Speter}
159318334Speter
159418334Speter/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
159518334Speter   any insns we wish to place in the delay slot of JUMP.  */
159618334Speter
159718334Speterstatic int
159818334Speterredirect_with_delay_list_safe_p (jump, newlabel, delay_list)
159918334Speter     rtx jump, newlabel, delay_list;
160018334Speter{
160118334Speter  int flags, i;
160218334Speter  rtx li;
160318334Speter
160418334Speter  /* Make sure all the insns in DELAY_LIST would still be
160518334Speter     valid after threading the jump.  If they are still
160618334Speter     valid, then return non-zero.  */
160718334Speter
160818334Speter  flags = get_jump_flags (jump, newlabel);
160918334Speter  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
161018334Speter    if (! (
161118334Speter#ifdef ANNUL_IFFALSE_SLOTS
161218334Speter	   (INSN_ANNULLED_BRANCH_P (jump)
161318334Speter	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
161418334Speter	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
161518334Speter#endif
161618334Speter#ifdef ANNUL_IFTRUE_SLOTS
161718334Speter	   (INSN_ANNULLED_BRANCH_P (jump)
161818334Speter	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
161918334Speter	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
162018334Speter#endif
162118334Speter	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
162218334Speter      break;
162318334Speter
162418334Speter  return (li == NULL);
162518334Speter}
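
/* Both safety checks above recompute the jump flags against NEWLABEL
   before testing eligibility: re-threading can turn a forward branch
   into a backward one, and on machines whose annulling or delay rules
   depend on branch direction, an insn valid for the old target need not
   be valid for the new one.  */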
162618334Speter
162718334Speter
162818334Speter/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
162918334Speter   the condition tested by INSN is CONDITION and the resources shown in
163018334Speter   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
163118334Speter   from SEQ's delay list, in addition to whatever insns it may execute
163218334Speter   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
163318334Speter   needed while searching for delay slot insns.  Return the concatenated
163418334Speter   delay list if possible, otherwise, return 0.
163518334Speter
163618334Speter   SLOTS_TO_FILL is the total number of slots required by INSN, and
163718334Speter   PSLOTS_FILLED points to the number filled so far (also the number of
163818334Speter   insns in DELAY_LIST).  It is updated with the number that have been
163918334Speter   filled from the SEQUENCE, if any.
164018334Speter
164118334Speter   PANNUL_P points to a non-zero value if we already know that we need
164218334Speter   to annul INSN.  If this routine determines that annulling is needed,
164318334Speter   it may set that value non-zero.
164418334Speter
164518334Speter   PNEW_THREAD points to a location that is to receive the place at which
164618334Speter   execution should continue.  */
164718334Speter
164818334Speterstatic rtx
164918334Spetersteal_delay_list_from_target (insn, condition, seq, delay_list,
165018334Speter			      sets, needed, other_needed,
165118334Speter			      slots_to_fill, pslots_filled, pannul_p,
165218334Speter			      pnew_thread)
165318334Speter     rtx insn, condition;
165418334Speter     rtx seq;
165518334Speter     rtx delay_list;
165618334Speter     struct resources *sets, *needed, *other_needed;
165718334Speter     int slots_to_fill;
165818334Speter     int *pslots_filled;
165918334Speter     int *pannul_p;
166018334Speter     rtx *pnew_thread;
166118334Speter{
166218334Speter  rtx temp;
166318334Speter  int slots_remaining = slots_to_fill - *pslots_filled;
166418334Speter  int total_slots_filled = *pslots_filled;
166518334Speter  rtx new_delay_list = 0;
166618334Speter  int must_annul = *pannul_p;
166718334Speter  int i;
166818334Speter
166918334Speter  /* We can't do anything if there are more delay slots in SEQ than we
167018334Speter     can handle, or if we don't know that it will be a taken branch.
167118334Speter     We know that it will be a taken branch if it is either an unconditional
167218334Speter     branch or a conditional branch with a stricter branch condition.
167318334Speter
167418334Speter     Also, exit if the branch has more than one set, since then it is computing
167518334Speter     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
167618334Speter     ??? It may be possible to move other sets into INSN in addition to
167718334Speter     moving the instructions in the delay slots.  */
167818334Speter
167918334Speter  if (XVECLEN (seq, 0) - 1 > slots_remaining
168018334Speter      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
168118334Speter      || ! single_set (XVECEXP (seq, 0, 0)))
168218334Speter    return delay_list;
168318334Speter
168418334Speter  for (i = 1; i < XVECLEN (seq, 0); i++)
168518334Speter    {
168618334Speter      rtx trial = XVECEXP (seq, 0, i);
168718334Speter      int flags;
168818334Speter
168918334Speter      if (insn_references_resource_p (trial, sets, 0)
169018334Speter	  || insn_sets_resource_p (trial, needed, 0)
169118334Speter	  || insn_sets_resource_p (trial, sets, 0)
169218334Speter#ifdef HAVE_cc0
169318334Speter	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
169418334Speter	     delay list.  */
169518334Speter	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
169618334Speter#endif
169718334Speter	  /* If TRIAL is from the fallthrough code of an annulled branch insn
169818334Speter	     in SEQ, we cannot use it.  */
169918334Speter	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
170018334Speter	      && ! INSN_FROM_TARGET_P (trial)))
170118334Speter	return delay_list;
170218334Speter
170318334Speter      /* If this insn was already done (usually in a previous delay slot),
170418334Speter	 pretend we put it in our delay slot.  */
170518334Speter      if (redundant_insn (trial, insn, new_delay_list))
170618334Speter	continue;
170718334Speter
170818334Speter      /* We will end up re-vectoring this branch, so compute flags
170918334Speter	 based on jumping to the new label.  */
171018334Speter      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
171118334Speter
171218334Speter      if (! must_annul
171318334Speter	  && ((condition == const_true_rtx
171418334Speter	       || (! insn_sets_resource_p (trial, other_needed, 0)
171518334Speter		   && ! may_trap_p (PATTERN (trial)))))
171618334Speter	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
171718334Speter	  : (must_annul = 1,
171818334Speter	     eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
171918334Speter	{
172018334Speter	  temp = copy_rtx (trial);
172118334Speter	  INSN_FROM_TARGET_P (temp) = 1;
172218334Speter	  new_delay_list = add_to_delay_list (temp, new_delay_list);
172318334Speter	  total_slots_filled++;
172418334Speter
172518334Speter	  if (--slots_remaining == 0)
172618334Speter	    break;
172718334Speter	}
172818334Speter      else
172918334Speter	return delay_list;
173018334Speter    }
173118334Speter
173218334Speter  /* Show the place to which we will be branching.  */
173318334Speter  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
173418334Speter
173518334Speter  /* Add any new insns to the delay list and update the count of the
173618334Speter     number of slots filled.  */
173718334Speter  *pslots_filled = total_slots_filled;
173818334Speter  *pannul_p = must_annul;
173918334Speter
174018334Speter  if (delay_list == 0)
174118334Speter    return new_delay_list;
174218334Speter
174318334Speter  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
174418334Speter    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
174518334Speter
174618334Speter  return delay_list;
174718334Speter}
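
/* Illustrative example, assuming the resource and eligibility tests
   pass: let INSN be `beq L1' with two empty slots, where L1 holds a
   filled SEQUENCE `b L2; add; sub'.  An unconditional branch is
   dominated by any condition, so copies of the add and sub (marked
   INSN_FROM_TARGET_P) fill INSN's slots, and *PNEW_THREAD becomes the
   first active insn after L2 so that INSN can later be re-vectored
   directly to L2.  */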
174818334Speter
174918334Speter/* Similar to steal_delay_list_from_target except that SEQ is on the
175018334Speter   fallthrough path of INSN.  Here we only do something if the delay insn
175118334Speter   of SEQ is an unconditional branch.  In that case we steal its delay slot
175218334Speter   for INSN since unconditional branches are much easier to fill.  */
175318334Speter
175418334Speterstatic rtx
175518334Spetersteal_delay_list_from_fallthrough (insn, condition, seq,
175618334Speter				   delay_list, sets, needed, other_needed,
175718334Speter				   slots_to_fill, pslots_filled, pannul_p)
175818334Speter     rtx insn, condition;
175918334Speter     rtx seq;
176018334Speter     rtx delay_list;
176118334Speter     struct resources *sets, *needed, *other_needed;
176218334Speter     int slots_to_fill;
176318334Speter     int *pslots_filled;
176418334Speter     int *pannul_p;
176518334Speter{
176618334Speter  int i;
176718334Speter  int flags;
176818334Speter
176918334Speter  flags = get_jump_flags (insn, JUMP_LABEL (insn));
177018334Speter
177118334Speter  /* We can't do anything if SEQ's delay insn isn't an
177218334Speter     unconditional branch.  */
177318334Speter
177418334Speter  if (! simplejump_p (XVECEXP (seq, 0, 0))
177518334Speter      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
177618334Speter    return delay_list;
177718334Speter
177818334Speter  for (i = 1; i < XVECLEN (seq, 0); i++)
177918334Speter    {
178018334Speter      rtx trial = XVECEXP (seq, 0, i);
178118334Speter
178218334Speter      /* If TRIAL sets CC0, stealing it will move it too far from the use
178318334Speter	 of CC0.  */
178418334Speter      if (insn_references_resource_p (trial, sets, 0)
178518334Speter	  || insn_sets_resource_p (trial, needed, 0)
178618334Speter	  || insn_sets_resource_p (trial, sets, 0)
178718334Speter#ifdef HAVE_cc0
178818334Speter	  || sets_cc0_p (PATTERN (trial))
178918334Speter#endif
179018334Speter	  )
179218334Speter	break;
179318334Speter
179418334Speter      /* If this insn was already done, we don't need it.  */
179518334Speter      if (redundant_insn (trial, insn, delay_list))
179618334Speter	{
179718334Speter	  delete_from_delay_slot (trial);
179818334Speter	  continue;
179918334Speter	}
180018334Speter
180118334Speter      if (! *pannul_p
180218334Speter	  && ((condition == const_true_rtx
180318334Speter	       || (! insn_sets_resource_p (trial, other_needed, 0)
180418334Speter		   && ! may_trap_p (PATTERN (trial)))))
180518334Speter	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
180618334Speter	  : (*pannul_p = 1,
180718334Speter	     eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
180818334Speter	{
180918334Speter	  delete_from_delay_slot (trial);
181018334Speter	  delay_list = add_to_delay_list (trial, delay_list);
181118334Speter
181218334Speter	  if (++(*pslots_filled) == slots_to_fill)
181318334Speter	    break;
181418334Speter	}
181518334Speter      else
181618334Speter	break;
181718334Speter    }
181818334Speter
181918334Speter  return delay_list;
182018334Speter}
182118334Speter
182218334Speter/* Try merging insns starting at THREAD which match exactly the insns in
182318334Speter   INSN's delay list.
182418334Speter
182518334Speter   If all insns were matched and the insn was previously annulling, the
182618334Speter   annul bit will be cleared.
182718334Speter
182818334Speter   For each insn that is merged, if the branch is or will be non-annulling,
182918334Speter   we delete the merged insn.  */
183018334Speter
183118334Speterstatic void
183218334Spetertry_merge_delay_insns (insn, thread)
183318334Speter     rtx insn, thread;
183418334Speter{
183518334Speter  rtx trial, next_trial;
183618334Speter  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
183718334Speter  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
183818334Speter  int slot_number = 1;
183918334Speter  int num_slots = XVECLEN (PATTERN (insn), 0);
184018334Speter  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
184118334Speter  struct resources set, needed;
184218334Speter  rtx merged_insns = 0;
184318334Speter  int i;
184418334Speter  int flags;
184518334Speter
184618334Speter  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
184718334Speter
184818334Speter  CLEAR_RESOURCE (&needed);
184918334Speter  CLEAR_RESOURCE (&set);
185018334Speter
185118334Speter  /* If this is not an annulling branch, take into account anything needed in
185218334Speter     NEXT_TO_MATCH.  This prevents two increments from being incorrectly
185318334Speter     folded into one.  If we are annulling, this would be the correct
185418334Speter     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH,
185518334Speter     will essentially disable this optimization.  This method is somewhat of
185618334Speter     a kludge, but I don't see a better way.)  */
185718334Speter  if (! annul_p)
185818334Speter    mark_referenced_resources (next_to_match, &needed, 1);
185918334Speter
186018334Speter  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
186118334Speter    {
186218334Speter      rtx pat = PATTERN (trial);
186318334Speter      rtx oldtrial = trial;
186418334Speter
186518334Speter      next_trial = next_nonnote_insn (trial);
186618334Speter
186718334Speter      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
186818334Speter      if (GET_CODE (trial) == INSN
186918334Speter	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
187018334Speter	continue;
187118334Speter
187218334Speter      if (GET_CODE (next_to_match) == GET_CODE (trial)
187318334Speter#ifdef HAVE_cc0
187418334Speter	  /* We can't share an insn that sets cc0.  */
187518334Speter	  && ! sets_cc0_p (pat)
187618334Speter#endif
187718334Speter	  && ! insn_references_resource_p (trial, &set, 1)
187818334Speter	  && ! insn_sets_resource_p (trial, &set, 1)
187918334Speter	  && ! insn_sets_resource_p (trial, &needed, 1)
188018334Speter	  && (trial = try_split (pat, trial, 0)) != 0
188118334Speter	  /* Update next_trial, in case try_split succeeded.  */
188218334Speter	  && (next_trial = next_nonnote_insn (trial))
188318334Speter	  /* Likewise THREAD.  */
188418334Speter	  && (thread = oldtrial == thread ? trial : thread)
188518334Speter	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
188618334Speter	  /* Have to test this condition if annul condition is different
188718334Speter	     from (and less restrictive than) non-annulling one.  */
188818334Speter	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
188918334Speter	{
189118334Speter	  if (! annul_p)
189218334Speter	    {
189318334Speter	      update_block (trial, thread);
189418334Speter	      if (trial == thread)
189518334Speter		thread = next_active_insn (thread);
189618334Speter
189718334Speter	      delete_insn (trial);
189818334Speter	      INSN_FROM_TARGET_P (next_to_match) = 0;
189918334Speter	    }
190018334Speter	  else
190150397Sobrien	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
190218334Speter
190318334Speter	  if (++slot_number == num_slots)
190418334Speter	    break;
190518334Speter
190618334Speter	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
190718334Speter	  if (! annul_p)
190818334Speter	    mark_referenced_resources (next_to_match, &needed, 1);
190918334Speter	}
191018334Speter
191118334Speter      mark_set_resources (trial, &set, 0, 1);
191218334Speter      mark_referenced_resources (trial, &needed, 1);
191318334Speter    }
191418334Speter
191518334Speter  /* See if we stopped on a filled insn.  If we did, try to see if its
191618334Speter     delay slots match.  */
191718334Speter  if (slot_number != num_slots
191818334Speter      && trial && GET_CODE (trial) == INSN
191918334Speter      && GET_CODE (PATTERN (trial)) == SEQUENCE
192018334Speter      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
192118334Speter    {
192218334Speter      rtx pat = PATTERN (trial);
192318334Speter      rtx filled_insn = XVECEXP (pat, 0, 0);
192418334Speter
192518334Speter      /* Account for resources set/needed by the filled insn.  */
192618334Speter      mark_set_resources (filled_insn, &set, 0, 1);
192718334Speter      mark_referenced_resources (filled_insn, &needed, 1);
192818334Speter
192918334Speter      for (i = 1; i < XVECLEN (pat, 0); i++)
193018334Speter	{
193118334Speter	  rtx dtrial = XVECEXP (pat, 0, i);
193218334Speter
193318334Speter	  if (! insn_references_resource_p (dtrial, &set, 1)
193418334Speter	      && ! insn_sets_resource_p (dtrial, &set, 1)
193518334Speter	      && ! insn_sets_resource_p (dtrial, &needed, 1)
193618334Speter#ifdef HAVE_cc0
193718334Speter	      && ! sets_cc0_p (PATTERN (dtrial))
193818334Speter#endif
193918334Speter	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
194018334Speter	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
194118334Speter	    {
194218334Speter	      if (! annul_p)
194318334Speter		{
194418334Speter		  update_block (dtrial, thread);
194518334Speter		  delete_from_delay_slot (dtrial);
194618334Speter		  INSN_FROM_TARGET_P (next_to_match) = 0;
194718334Speter		}
194818334Speter	      else
194950397Sobrien		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
195050397Sobrien						  merged_insns);
195118334Speter
195218334Speter	      if (++slot_number == num_slots)
195318334Speter		break;
195418334Speter
195518334Speter	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
195618334Speter	    }
195718334Speter	}
195818334Speter    }
195918334Speter
196018334Speter  /* If all insns in the delay slot have been matched and we were previously
196118334Speter     annulling the branch, we need no longer do so.  In that case delete all the
196250397Sobrien     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
196318334Speter     the delay list so that we know that it isn't only being used at the
196418334Speter     target.  */
196518334Speter  if (slot_number == num_slots && annul_p)
196618334Speter    {
196718334Speter      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
196818334Speter	{
196918334Speter	  if (GET_MODE (merged_insns) == SImode)
197018334Speter	    {
197118334Speter	      update_block (XEXP (merged_insns, 0), thread);
197218334Speter	      delete_from_delay_slot (XEXP (merged_insns, 0));
197318334Speter	    }
197418334Speter	  else
197518334Speter	    {
197618334Speter	      update_block (XEXP (merged_insns, 0), thread);
197718334Speter	      delete_insn (XEXP (merged_insns, 0));
197818334Speter	    }
197918334Speter	}
198018334Speter
198118334Speter      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
198218334Speter
198318334Speter      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
198418334Speter	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
198518334Speter    }
198618334Speter}
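
/* Note on the bookkeeping above: insns merged out of the thread itself
   are recorded on MERGED_INSNS with VOIDmode links and are deleted with
   delete_insn, while insns matched inside another filled SEQUENCE use
   SImode links so that delete_from_delay_slot is called for them
   instead.  The mode field of the INSN_LIST is simply borrowed as a
   one-bit tag.  */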
198718334Speter
198818334Speter/* See if INSN is redundant with an insn in front of TARGET.  Often this
198918334Speter   is called when INSN is a candidate for a delay slot of TARGET.
199018334Speter   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
199118334Speter   of INSN.  Often INSN will be redundant with an insn in a delay slot of
199218334Speter   some previous insn.  This happens when we have a series of branches to the
199318334Speter   same label; in that case the first insn at the target might want to go
199418334Speter   into each of the delay slots.
199518334Speter
199618334Speter   If we are not careful, this routine can take up a significant fraction
199718334Speter   of the total compilation time (4%), but only wins rarely.  Hence we
199818334Speter   speed this routine up by making two passes.  The first pass goes back
199918334Speter   until it hits a label and sees if it can find an insn with an identical
200018334Speter   pattern.  Only in this (relatively rare) event does it check for
200118334Speter   data conflicts.
200218334Speter
200318334Speter   We do not split insns we encounter.  This could cause us not to find a
200418334Speter   redundant insn, but the cost of splitting seems greater than the possible
200518334Speter   gain in rare cases.  */
200618334Speter
200718334Speterstatic rtx
200818334Speterredundant_insn (insn, target, delay_list)
200918334Speter     rtx insn;
201018334Speter     rtx target;
201118334Speter     rtx delay_list;
201218334Speter{
201318334Speter  rtx target_main = target;
201418334Speter  rtx ipat = PATTERN (insn);
201518334Speter  rtx trial, pat;
201618334Speter  struct resources needed, set;
201718334Speter  int i;
201818334Speter
201950397Sobrien  /* If INSN has any REG_UNUSED notes, it can't match anything since we
202050397Sobrien     are allowed to not actually assign to such a register.  */
202150397Sobrien  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
202250397Sobrien    return 0;
202350397Sobrien
202418334Speter  /* Scan backwards looking for a match.  */
202518334Speter  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
202618334Speter    {
202718334Speter      if (GET_CODE (trial) == CODE_LABEL)
202818334Speter	return 0;
202918334Speter
203018334Speter      if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
203118334Speter	continue;
203218334Speter
203318334Speter      pat = PATTERN (trial);
203418334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
203518334Speter	continue;
203618334Speter
203718334Speter      if (GET_CODE (pat) == SEQUENCE)
203818334Speter	{
203918334Speter	  /* Stop for a CALL and its delay slots because it is difficult to
204018334Speter	     track its resource needs correctly.  */
204118334Speter	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
204218334Speter	    return 0;
204318334Speter
204418334Speter	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
204518334Speter	     slots because it is difficult to track its resource needs
204618334Speter	     correctly.  */
204718334Speter
204818334Speter#ifdef INSN_SETS_ARE_DELAYED
204918334Speter	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
205018334Speter	    return 0;
205118334Speter#endif
205218334Speter
205318334Speter#ifdef INSN_REFERENCES_ARE_DELAYED
205418334Speter	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
205518334Speter	    return 0;
205618334Speter#endif
205718334Speter
205818334Speter	  /* See if any of the insns in the delay slot match, updating
205918334Speter	     resource requirements as we go.  */
206018334Speter	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
206118334Speter	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
206250397Sobrien		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
206350397Sobrien		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
206418334Speter	      break;
206518334Speter
206618334Speter	  /* If found a match, exit this loop early.  */
206718334Speter	  if (i > 0)
206818334Speter	    break;
206918334Speter	}
207018334Speter
207150397Sobrien      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
207250397Sobrien	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
207318334Speter	break;
207418334Speter    }
207518334Speter
207618334Speter  /* If we didn't find an insn that matches, return 0.  */
207718334Speter  if (trial == 0)
207818334Speter    return 0;
207918334Speter
208018334Speter  /* See what resources this insn sets and needs.  If they overlap, or
208118334Speter     if this insn references CC0, it can't be redundant.  */
208218334Speter
208318334Speter  CLEAR_RESOURCE (&needed);
208418334Speter  CLEAR_RESOURCE (&set);
208518334Speter  mark_set_resources (insn, &set, 0, 1);
208618334Speter  mark_referenced_resources (insn, &needed, 1);
208718334Speter
208818334Speter  /* If TARGET is a SEQUENCE, get the main insn.  */
208918334Speter  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
209018334Speter    target_main = XVECEXP (PATTERN (target), 0, 0);
209118334Speter
209218334Speter  if (resource_conflicts_p (&needed, &set)
209318334Speter#ifdef HAVE_cc0
209418334Speter      || reg_mentioned_p (cc0_rtx, ipat)
209518334Speter#endif
209618334Speter      /* The insn requiring the delay may not set anything needed or set by
209718334Speter	 INSN.  */
209818334Speter      || insn_sets_resource_p (target_main, &needed, 1)
209918334Speter      || insn_sets_resource_p (target_main, &set, 1))
210018334Speter    return 0;
210118334Speter
210218334Speter  /* Insns we pass may not set either NEEDED or SET, so merge them for
210318334Speter     simpler tests.  */
210418334Speter  needed.memory |= set.memory;
210518334Speter  needed.unch_memory |= set.unch_memory;
210618334Speter  IOR_HARD_REG_SET (needed.regs, set.regs);
210718334Speter
210818334Speter  /* This insn isn't redundant if it conflicts with an insn that either is
210918334Speter     or will be in a delay slot of TARGET.  */
211018334Speter
211118334Speter  while (delay_list)
211218334Speter    {
211318334Speter      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
211418334Speter	return 0;
211518334Speter      delay_list = XEXP (delay_list, 1);
211618334Speter    }
211718334Speter
211818334Speter  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
211918334Speter    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
212018334Speter      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
212118334Speter	return 0;
212218334Speter
212318334Speter  /* Scan backwards until we reach a label or an insn that uses something
212418334Speter   INSN sets or sets something INSN uses or sets.  */
212518334Speter
212618334Speter  for (trial = PREV_INSN (target);
212718334Speter       trial && GET_CODE (trial) != CODE_LABEL;
212818334Speter       trial = PREV_INSN (trial))
212918334Speter    {
213018334Speter      if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
213118334Speter	  && GET_CODE (trial) != JUMP_INSN)
213218334Speter	continue;
213318334Speter
213418334Speter      pat = PATTERN (trial);
213518334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
213618334Speter	continue;
213718334Speter
213818334Speter      if (GET_CODE (pat) == SEQUENCE)
213918334Speter	{
214018334Speter	  /* If this is a CALL_INSN and its delay slots, it is hard to track
214118334Speter	     the resource needs properly, so give up.  */
214218334Speter	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
214318334Speter	    return 0;
214418334Speter
214550397Sobrien	  /* If this is an INSN or JUMP_INSN with delayed effects, it
214618334Speter	     is hard to track the resource needs properly, so give up.  */
214718334Speter
214818334Speter#ifdef INSN_SETS_ARE_DELAYED
214918334Speter	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
215018334Speter	    return 0;
215118334Speter#endif
215218334Speter
215318334Speter#ifdef INSN_REFERENCES_ARE_DELAYED
215418334Speter	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
215518334Speter	    return 0;
215618334Speter#endif
215718334Speter
215818334Speter	  /* See if any of the insns in the delay slot match, updating
215918334Speter	     resource requirements as we go.  */
216018334Speter	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
216118334Speter	    {
216218334Speter	      rtx candidate = XVECEXP (pat, 0, i);
216318334Speter
216418334Speter	      /* If an insn will be annulled if the branch is false, it isn't
216518334Speter		 considered as a possible duplicate insn.  */
216618334Speter	      if (rtx_equal_p (PATTERN (candidate), ipat)
216718334Speter		  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
216818334Speter			&& INSN_FROM_TARGET_P (candidate)))
216918334Speter		{
217018334Speter		  /* Show that this insn will be used in the sequel.  */
217118334Speter		  INSN_FROM_TARGET_P (candidate) = 0;
217218334Speter		  return candidate;
217318334Speter		}
217418334Speter
217518334Speter	      /* Unless this is an annulled insn from the target of a branch,
217618334Speter		 we must stop if it sets anything needed or set by INSN.  */
217718334Speter	      if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
217818334Speter		   || ! INSN_FROM_TARGET_P (candidate))
217918334Speter		  && insn_sets_resource_p (candidate, &needed, 1))
218018334Speter		return 0;
218118334Speter	    }
218218334Speter
218318334Speter
218418334Speter	  /* If the insn requiring the delay slot conflicts with INSN, we
218518334Speter	     must stop.  */
218618334Speter	  if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
218718334Speter	    return 0;
218818334Speter	}
218918334Speter      else
219018334Speter	{
219118334Speter	  /* See if TRIAL is the same as INSN.  */
219218334Speter	  pat = PATTERN (trial);
219318334Speter	  if (rtx_equal_p (pat, ipat))
219418334Speter	    return trial;
219518334Speter
219618334Speter	  /* Can't go any further if TRIAL conflicts with INSN.  */
219718334Speter	  if (insn_sets_resource_p (trial, &needed, 1))
219818334Speter	    return 0;
219918334Speter	}
220018334Speter    }
220118334Speter
220218334Speter  return 0;
220318334Speter}
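
/* Illustrative example: if TRIAL `add r2,r3' is being considered for
   TARGET's delay slot, and the same pattern already sits in the delay
   slot of an earlier filled branch between the last CODE_LABEL and
   TARGET, the backward scan above finds it, the resource checks verify
   that nothing in between disturbs r2 or r3, and that earlier copy is
   returned so TRIAL is treated as already performed.  (The register
   names are made up for the example.)  */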
220418334Speter
220518334Speter/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
220618334Speter   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
220718334Speter   is non-zero, we are allowed to fall into this thread; otherwise, we are
220818334Speter   not.
220918334Speter
221018334Speter   If LABEL is used more than once or we pass a label other than LABEL
221118334Speter   finding an active insn, we do not own this thread.  */
221218334Speter
221318334Speterstatic int
221418334Speterown_thread_p (thread, label, allow_fallthrough)
221518334Speter     rtx thread;
221618334Speter     rtx label;
221718334Speter     int allow_fallthrough;
221818334Speter{
221918334Speter  rtx active_insn;
222018334Speter  rtx insn;
222118334Speter
222218334Speter  /* We don't own the function end.  */
222318334Speter  if (thread == 0)
222418334Speter    return 0;
222518334Speter
222618334Speter  /* Get the first active insn, or THREAD, if it is an active insn.  */
222718334Speter  active_insn = next_active_insn (PREV_INSN (thread));
222818334Speter
222918334Speter  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
223018334Speter    if (GET_CODE (insn) == CODE_LABEL
223118334Speter	&& (insn != label || LABEL_NUSES (insn) != 1))
223218334Speter      return 0;
223318334Speter
223418334Speter  if (allow_fallthrough)
223518334Speter    return 1;
223618334Speter
223718334Speter  /* Ensure that we reach a BARRIER before any insn or label.  */
223818334Speter  for (insn = prev_nonnote_insn (thread);
223918334Speter       insn == 0 || GET_CODE (insn) != BARRIER;
224018334Speter       insn = prev_nonnote_insn (insn))
224118334Speter    if (insn == 0
224218334Speter	|| GET_CODE (insn) == CODE_LABEL
224318334Speter	|| (GET_CODE (insn) == INSN
224418334Speter	    && GET_CODE (PATTERN (insn)) != USE
224518334Speter	    && GET_CODE (PATTERN (insn)) != CLOBBER))
224618334Speter      return 0;
224718334Speter
224818334Speter  return 1;
224918334Speter}
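
/* Example: a thread beginning at a label used exactly once (as LABEL),
   with only notes and then a BARRIER before it, is owned -- the branch
   being scanned is the only way to reach it, so its insns may be
   consumed.  If another label intervenes before the first active insn,
   or ALLOW_FALLTHROUGH is zero but no BARRIER shields the thread from
   fallthrough, 0 is returned.  */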
225018334Speter
225118334Speter/* Find the number of the basic block that starts closest to INSN.  Return -1
225218334Speter   if we couldn't find such a basic block.  */
225318334Speter
225418334Speterstatic int
225518334Speterfind_basic_block (insn)
225618334Speter     rtx insn;
225718334Speter{
225818334Speter  int i;
225918334Speter
226018334Speter  /* Scan backwards to the previous BARRIER.  Then see if we can find a
226118334Speter     label that starts a basic block.  Return the basic block number.  */
226218334Speter
226318334Speter  for (insn = prev_nonnote_insn (insn);
226418334Speter       insn && GET_CODE (insn) != BARRIER;
226518334Speter       insn = prev_nonnote_insn (insn))
226618334Speter    ;
226718334Speter
226818334Speter  /* The start of the function is basic block zero.  */
226918334Speter  if (insn == 0)
227018334Speter    return 0;
227118334Speter
227218334Speter  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
227318334Speter     anything other than a CODE_LABEL or note, we can't find this code.  */
227418334Speter  for (insn = next_nonnote_insn (insn);
227518334Speter       insn && GET_CODE (insn) == CODE_LABEL;
227618334Speter       insn = next_nonnote_insn (insn))
227718334Speter    {
227818334Speter      for (i = 0; i < n_basic_blocks; i++)
227918334Speter	if (insn == basic_block_head[i])
228018334Speter	  return i;
228118334Speter    }
228218334Speter
228318334Speter  return -1;
228418334Speter}
228518334Speter
228618334Speter/* Called when INSN is being moved from a location near the target of a jump.
228718334Speter   We leave a marker of the form (use (INSN)) immediately in front
228818334Speter   of WHERE for mark_target_live_regs.  These markers will be deleted when
228918334Speter   reorg finishes.
229018334Speter
229118334Speter   We used to try to update the live status of registers if WHERE is at
229218334Speter   the start of a basic block, but that can't work since we may remove a
229318334Speter   BARRIER in relax_delay_slots.  */
229418334Speter
229518334Speterstatic void
229618334Speterupdate_block (insn, where)
229718334Speter     rtx insn;
229818334Speter     rtx where;
229918334Speter{
230018334Speter  int b;
230118334Speter
230218334Speter  /* Ignore if this was in a delay slot and it came from the target of
230318334Speter     a branch.  */
230418334Speter  if (INSN_FROM_TARGET_P (insn))
230518334Speter    return;
230618334Speter
230750397Sobrien  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
230818334Speter
230918334Speter  /* INSN might be making a value live in a block where it didn't use to
231018334Speter     be.  So recompute liveness information for this block.  */
231118334Speter
231218334Speter  b = find_basic_block (insn);
231318334Speter  if (b != -1)
231418334Speter    bb_ticks[b]++;
231518334Speter}
231618334Speter
231718334Speter/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
231818334Speter   the basic block containing the jump.  */
231918334Speter
232018334Speterstatic int
232118334Speterreorg_redirect_jump (jump, nlabel)
232218334Speter     rtx jump;
232318334Speter     rtx nlabel;
232418334Speter{
232518334Speter  int b = find_basic_block (jump);
232618334Speter
232718334Speter  if (b != -1)
232818334Speter    bb_ticks[b]++;
232918334Speter
233018334Speter  return redirect_jump (jump, nlabel);
233118334Speter}
233218334Speter
233318334Speter/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
233418334Speter   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
233518334Speter   that reference values used in INSN.  If we find one, then we move the
233618334Speter   REG_DEAD note to INSN.
233718334Speter
233818334Speter   This is needed to handle the case where a later insn (after INSN) has a
233918334Speter   REG_DEAD note for a register used by INSN, and this later insn subsequently
234018334Speter   gets moved before a CODE_LABEL because it is a redundant insn.  In this
234118334Speter   case, mark_target_live_regs may be confused into thinking the register
234218334Speter   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */
234318334Speter
234418334Speterstatic void
234518334Speterupdate_reg_dead_notes (insn, delayed_insn)
234618334Speter     rtx insn, delayed_insn;
234718334Speter{
234818334Speter  rtx p, link, next;
234918334Speter
235018334Speter  for (p = next_nonnote_insn (insn); p != delayed_insn;
235118334Speter       p = next_nonnote_insn (p))
235218334Speter    for (link = REG_NOTES (p); link; link = next)
235318334Speter      {
235418334Speter	next = XEXP (link, 1);
235518334Speter
235618334Speter	if (REG_NOTE_KIND (link) != REG_DEAD
235718334Speter	    || GET_CODE (XEXP (link, 0)) != REG)
235818334Speter	  continue;
235918334Speter
236018334Speter	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
236118334Speter	  {
236218334Speter	    /* Move the REG_DEAD note from P to INSN.  */
236318334Speter	    remove_note (p, link);
236418334Speter	    XEXP (link, 1) = REG_NOTES (insn);
236518334Speter	    REG_NOTES (insn) = link;
236618334Speter	  }
236718334Speter      }
236818334Speter}
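
/* Worked example: suppose INSN uses (reg 4) and is being moved into
   DELAYED_INSN's slot, while some insn P between the two carries a
   (REG_DEAD (reg 4)) note.  The loop above unlinks the note from P and
   threads it onto INSN's REG_NOTES, so the death of the register is
   recorded at its new, earlier position.  */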
236918334Speter
237050397Sobrien/* Called when an insn redundant with start_insn is deleted.  If there
237150397Sobrien   is a REG_DEAD note for the target of start_insn between start_insn
237250397Sobrien   and stop_insn, then the REG_DEAD note needs to be deleted since the
237350397Sobrien   value no longer dies there.
237450397Sobrien
237550397Sobrien   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
237650397Sobrien   confused into thinking the register is dead.  */
237750397Sobrien
237850397Sobrienstatic void
237950397Sobrienfix_reg_dead_note (start_insn, stop_insn)
238050397Sobrien     rtx start_insn, stop_insn;
238150397Sobrien{
238250397Sobrien  rtx p, link, next;
238350397Sobrien
238450397Sobrien  for (p = next_nonnote_insn (start_insn); p != stop_insn;
238550397Sobrien       p = next_nonnote_insn (p))
238650397Sobrien    for (link = REG_NOTES (p); link; link = next)
238750397Sobrien      {
238850397Sobrien	next = XEXP (link, 1);
238950397Sobrien
239050397Sobrien	if (REG_NOTE_KIND (link) != REG_DEAD
239150397Sobrien	    || GET_CODE (XEXP (link, 0)) != REG)
239250397Sobrien	  continue;
239350397Sobrien
239450397Sobrien	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
239550397Sobrien	  {
239650397Sobrien	    remove_note (p, link);
239750397Sobrien	    return;
239850397Sobrien	  }
239950397Sobrien      }
240050397Sobrien}
240150397Sobrien
240218334Speter/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
240318334Speter
240418334Speter   This handles the case of udivmodXi4 instructions which optimize their
240518334Speter   output depending on whether any REG_UNUSED notes are present.
240618334Speter   We must make sure that INSN calculates as many results as REDUNDANT_INSN
240718334Speter   does.  */
240818334Speter
240918334Speterstatic void
241018334Speterupdate_reg_unused_notes (insn, redundant_insn)
241118334Speter     rtx insn, redundant_insn;
241218334Speter{
241350397Sobrien  rtx link, next;
241418334Speter
241518334Speter  for (link = REG_NOTES (insn); link; link = next)
241618334Speter    {
241718334Speter      next = XEXP (link, 1);
241818334Speter
241918334Speter      if (REG_NOTE_KIND (link) != REG_UNUSED
242018334Speter	  || GET_CODE (XEXP (link, 0)) != REG)
242118334Speter	continue;
242218334Speter
242318334Speter      if (! find_regno_note (redundant_insn, REG_UNUSED,
242418334Speter			     REGNO (XEXP (link, 0))))
242518334Speter	remove_note (insn, link);
242618334Speter    }
242718334Speter}
242818334Speter
242918334Speter/* Marks registers possibly live at the current place being scanned by
243018334Speter   mark_target_live_regs.  Used only by the next two functions.  */
243118334Speter
243218334Speterstatic HARD_REG_SET current_live_regs;
243318334Speter
243418334Speter/* Marks registers for which we have seen a REG_DEAD note but no assignment.
243518334Speter   Also only used by the next two functions.  */
243618334Speter
243718334Speterstatic HARD_REG_SET pending_dead_regs;
243818334Speter
243918334Speter/* Utility function called from mark_target_live_regs via note_stores.
244018334Speter   It deadens any CLOBBERed registers and livens any SET registers.  */
244118334Speter
244218334Speterstatic void
244318334Speterupdate_live_status (dest, x)
244418334Speter     rtx dest;
244518334Speter     rtx x;
244618334Speter{
244718334Speter  int first_regno, last_regno;
244818334Speter  int i;
244918334Speter
245018334Speter  if (GET_CODE (dest) != REG
245118334Speter      && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
245218334Speter    return;
245318334Speter
245418334Speter  if (GET_CODE (dest) == SUBREG)
245518334Speter    first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
245618334Speter  else
245718334Speter    first_regno = REGNO (dest);
245818334Speter
245918334Speter  last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
246018334Speter
246118334Speter  if (GET_CODE (x) == CLOBBER)
246218334Speter    for (i = first_regno; i < last_regno; i++)
246318334Speter      CLEAR_HARD_REG_BIT (current_live_regs, i);
246418334Speter  else
246518334Speter    for (i = first_regno; i < last_regno; i++)
246618334Speter      {
246718334Speter	SET_HARD_REG_BIT (current_live_regs, i);
246818334Speter	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
246918334Speter      }
247018334Speter}
247118334Speter
247218334Speter/* Similar to next_insn, but ignores insns in the delay slots of
247318334Speter   an annulled branch.  */
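/* For example (a hypothetical insn stream): given a filled, annulled
   branch

	(insn (sequence [(jump_insn ...)    ; INSN_ANNULLED_BRANCH_P set
			 (insn A ...)]))    ; INSN_FROM_TARGET_P set
	(insn B ...)

   stepping from the annulled jump skips A, which would execute only if
   the branch were taken, and yields B (or the first insn of B's
   SEQUENCE, if B itself has filled slots).  */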
247418334Speter
247518334Speterstatic rtx
247618334Speternext_insn_no_annul (insn)
247718334Speter     rtx insn;
247818334Speter{
247918334Speter  if (insn)
248018334Speter    {
248118334Speter      /* If INSN is an annulled branch, skip any insns from the target
248218334Speter	 of the branch.  */
248318334Speter      if (INSN_ANNULLED_BRANCH_P (insn)
248418334Speter	  && NEXT_INSN (PREV_INSN (insn)) != insn)
248518334Speter	while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
248618334Speter	  insn = NEXT_INSN (insn);
248718334Speter
248818334Speter      insn = NEXT_INSN (insn);
248918334Speter      if (insn && GET_CODE (insn) == INSN
249018334Speter	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
249118334Speter	insn = XVECEXP (PATTERN (insn), 0, 0);
249218334Speter    }
249318334Speter
249418334Speter  return insn;
249518334Speter}
249618334Speter
249750397Sobrien/* A subroutine of mark_target_live_regs.  Search forward from TARGET
249850397Sobrien   looking for registers that are set before they are used.  These are dead.
249950397Sobrien   Stop after passing a few conditional jumps, and/or a small
250050397Sobrien   number of unconditional branches.  */
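/* A minimal sketch of the "set before used" test (hypothetical
   pseudo-assembly):

	TARGET:	mov	0,%r3		; %r3 written, no prior use
		add	%r1,%r3,%r3

   every path from TARGET writes %r3 before reading it, so %r3 is dead
   at TARGET and can be removed from RES->regs.  */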
250150397Sobrien
250250397Sobrienstatic rtx
250350397Sobrienfind_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
250450397Sobrien     rtx target;
250550397Sobrien     struct resources *res;
250650397Sobrien     rtx *jump_target;
250750397Sobrien     int jump_count;
250850397Sobrien     struct resources set, needed;
250950397Sobrien{
251050397Sobrien  HARD_REG_SET scratch;
251150397Sobrien  rtx insn, next;
251250397Sobrien  rtx jump_insn = 0;
251350397Sobrien  int i;
251450397Sobrien
251550397Sobrien  for (insn = target; insn; insn = next)
251650397Sobrien    {
251750397Sobrien      rtx this_jump_insn = insn;
251850397Sobrien
251950397Sobrien      next = NEXT_INSN (insn);
252050397Sobrien      switch (GET_CODE (insn))
252150397Sobrien	{
252250397Sobrien	case CODE_LABEL:
252350397Sobrien	  /* After a label, any pending dead registers that weren't yet
252450397Sobrien	     used can be made dead.  */
252550397Sobrien	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
252650397Sobrien	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
252750397Sobrien	  CLEAR_HARD_REG_SET (pending_dead_regs);
252850397Sobrien
252950397Sobrien	  if (CODE_LABEL_NUMBER (insn) < max_label_num_after_reload)
253050397Sobrien	    {
253150397Sobrien	      /* All spill registers are dead at a label, so kill all of the
253250397Sobrien		 ones that aren't needed also.  */
253350397Sobrien	      COPY_HARD_REG_SET (scratch, used_spill_regs);
253450397Sobrien	      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
253550397Sobrien	      AND_COMPL_HARD_REG_SET (res->regs, scratch);
253650397Sobrien	    }
253750397Sobrien	  continue;
253850397Sobrien
253950397Sobrien	case BARRIER:
254050397Sobrien	case NOTE:
254150397Sobrien	  continue;
254250397Sobrien
254350397Sobrien	case INSN:
254450397Sobrien	  if (GET_CODE (PATTERN (insn)) == USE)
254550397Sobrien	    {
254650397Sobrien	      /* If INSN is a USE made by update_block, we care about the
254750397Sobrien		 underlying insn.  Any registers set by the underlying insn
254850397Sobrien		 are live since the insn is being done somewhere else.  */
254950397Sobrien	      if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
255050397Sobrien		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
255150397Sobrien
255250397Sobrien	      /* All other USE insns are to be ignored.  */
255350397Sobrien	      continue;
255450397Sobrien	    }
255550397Sobrien	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
255650397Sobrien	    continue;
255750397Sobrien	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
255850397Sobrien	    {
255950397Sobrien	      /* An unconditional jump can be used to fill the delay slot
256050397Sobrien		 of a call, so search for a JUMP_INSN in any position.  */
256150397Sobrien	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
256250397Sobrien		{
256350397Sobrien		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
256450397Sobrien		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
256550397Sobrien		    break;
256650397Sobrien		}
256750397Sobrien	    }
256850397Sobrien
256950397Sobrien	default:
257050397Sobrien	  break;
257150397Sobrien	}
257250397Sobrien
257350397Sobrien      if (GET_CODE (this_jump_insn) == JUMP_INSN)
257450397Sobrien	{
257550397Sobrien	  if (jump_count++ < 10)
257650397Sobrien	    {
257750397Sobrien	      if (simplejump_p (this_jump_insn)
257850397Sobrien		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
257950397Sobrien		{
258050397Sobrien		  next = JUMP_LABEL (this_jump_insn);
258150397Sobrien		  if (jump_insn == 0)
258250397Sobrien		    {
258350397Sobrien		      jump_insn = insn;
258450397Sobrien		      if (jump_target)
258550397Sobrien			*jump_target = JUMP_LABEL (this_jump_insn);
258650397Sobrien		    }
258750397Sobrien		}
258850397Sobrien	      else if (condjump_p (this_jump_insn)
258950397Sobrien		       || condjump_in_parallel_p (this_jump_insn))
259050397Sobrien		{
259150397Sobrien		  struct resources target_set, target_res;
259250397Sobrien		  struct resources fallthrough_res;
259350397Sobrien
259450397Sobrien		  /* We can handle conditional branches here by following
259550397Sobrien		     both paths, and then IOR the results of the two paths
259650397Sobrien		     together, which will give us registers that are dead
259750397Sobrien		     on both paths.  Since this is expensive, we give it
259850397Sobrien		     a much higher cost than unconditional branches.  The
259950397Sobrien		     cost was chosen so that we will follow at most 1
260050397Sobrien		     conditional branch.  */
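		  /* Concretely: a simple jump costs 1 (the increment in
		     the test above) while a conditional costs 1 + 4 = 5
		     against a budget of 10, so once one conditional has
		     been followed a second one pushes the count to at
		     least 10 and the search stops.  */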
260150397Sobrien
260250397Sobrien		  jump_count += 4;
260350397Sobrien		  if (jump_count >= 10)
260450397Sobrien		    break;
260550397Sobrien
260650397Sobrien		  mark_referenced_resources (insn, &needed, 1);
260750397Sobrien
260850397Sobrien		  /* For an annulled branch, mark_set_resources ignores slots
260950397Sobrien		     filled by instructions from the target.  This is correct
261050397Sobrien		     if the branch is not taken.  Since we are following both
261150397Sobrien		     paths from the branch, we must also compute correct info
261250397Sobrien		     if the branch is taken.  We do this by inverting all of
261350397Sobrien		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
261450397Sobrien		     and then inverting the INSN_FROM_TARGET_P bits again.  */
261550397Sobrien
261650397Sobrien		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
261750397Sobrien		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
261850397Sobrien		    {
261950397Sobrien		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
262050397Sobrien			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
262150397Sobrien			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
262250397Sobrien
262350397Sobrien		      target_set = set;
262450397Sobrien		      mark_set_resources (insn, &target_set, 0, 1);
262550397Sobrien
262650397Sobrien		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
262750397Sobrien			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
262850397Sobrien			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
262950397Sobrien
263050397Sobrien		      mark_set_resources (insn, &set, 0, 1);
263150397Sobrien		    }
263250397Sobrien		  else
263350397Sobrien		    {
263450397Sobrien		      mark_set_resources (insn, &set, 0, 1);
263550397Sobrien		      target_set = set;
263650397Sobrien		    }
263750397Sobrien
263850397Sobrien		  target_res = *res;
263950397Sobrien		  COPY_HARD_REG_SET (scratch, target_set.regs);
264050397Sobrien		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
264150397Sobrien		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
264250397Sobrien
264350397Sobrien		  fallthrough_res = *res;
264450397Sobrien		  COPY_HARD_REG_SET (scratch, set.regs);
264550397Sobrien		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
264650397Sobrien		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
264750397Sobrien
264850397Sobrien		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
264950397Sobrien					      &target_res, 0, jump_count,
265050397Sobrien					      target_set, needed);
265150397Sobrien		  find_dead_or_set_registers (next,
265250397Sobrien					      &fallthrough_res, 0, jump_count,
265350397Sobrien					      set, needed);
265450397Sobrien		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
265550397Sobrien		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
265650397Sobrien		  break;
265750397Sobrien		}
265850397Sobrien	      else
265950397Sobrien		break;
266050397Sobrien	    }
266150397Sobrien	  else
266250397Sobrien	    {
266350397Sobrien	      /* Don't try this optimization if we expired our jump count
266450397Sobrien		 above, since that would mean there may be an infinite loop
266550397Sobrien		 in the function being compiled.  */
266650397Sobrien	      jump_insn = 0;
266750397Sobrien	      break;
266850397Sobrien	    }
266950397Sobrien	}
267050397Sobrien
267150397Sobrien      mark_referenced_resources (insn, &needed, 1);
267250397Sobrien      mark_set_resources (insn, &set, 0, 1);
267350397Sobrien
267450397Sobrien      COPY_HARD_REG_SET (scratch, set.regs);
267550397Sobrien      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
267650397Sobrien      AND_COMPL_HARD_REG_SET (res->regs, scratch);
267750397Sobrien    }
267850397Sobrien
267950397Sobrien  return jump_insn;
268050397Sobrien}
268150397Sobrien
268218334Speter/* Set the resources that are live at TARGET.
268318334Speter
268418334Speter   If TARGET is zero, we refer to the end of the current function and can
268518334Speter   return our precomputed value.
268618334Speter
268718334Speter   Otherwise, we try to find out what is live by consulting the basic block
268818334Speter   information.  This is tricky, because we must consider the actions of
268918334Speter   reload and jump optimization, which occur after the basic block information
269018334Speter   has been computed.
269118334Speter
269218334Speter   Accordingly, we proceed as follows:
269318334Speter
269418334Speter   We find the previous BARRIER and look at all immediately following labels
269518334Speter   (with no intervening active insns) to see if any of them start a basic
269618334Speter   block.  If we hit the start of the function first, we use block 0.
269718334Speter
269818334Speter   Once we have found a basic block and a corresponding first insn, we can
269918334Speter   accurately compute the live status from basic_block_live_at_start and
270018334Speter   reg_renumber.  (By starting at a label following a BARRIER, we are immune
270118334Speter   to actions taken by reload and jump.)  Then we scan all insns between
270218334Speter   that point and our target.  For each CLOBBER (or for call-clobbered regs
270318334Speter   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
270418334Speter   a SET, mark them as live.
270518334Speter
270618334Speter   We have to be careful when using REG_DEAD notes because they are not
270718334Speter   updated by such things as find_equiv_reg.  So keep track of registers
270818334Speter   marked as dead that haven't been assigned to, and mark them dead at the
270918334Speter   next CODE_LABEL since reload and jump won't propagate values across labels.
271018334Speter
271118334Speter   If we cannot find the start of a basic block (should be a very rare
271218334Speter   case, if it can happen at all), mark everything as potentially live.
271318334Speter
271418334Speter   Next, scan forward from TARGET looking for things set or clobbered
271518334Speter   before they are used.  These are not live.
271618334Speter
271718334Speter   Because we can be called many times on the same target, save our results
271818334Speter   in a hash table indexed by INSN_UID.  */
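/* A sketch of the backward search described above (hypothetical insn
   stream):

	barrier
   L4:			<- immediately follows the BARRIER; not a block head
   L5:			<- say L5 is basic_block_head[N]; use block N
	insn ...	<- rescan from here toward TARGET, updating
	TARGET		   current_live_regs as insns set or kill regs  */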
271918334Speter
272018334Speterstatic void
272118334Spetermark_target_live_regs (target, res)
272218334Speter     rtx target;
272318334Speter     struct resources *res;
272418334Speter{
272518334Speter  int b = -1;
272618334Speter  int i;
272718334Speter  struct target_info *tinfo;
272850397Sobrien  rtx insn;
272918334Speter  rtx jump_insn = 0;
273018334Speter  rtx jump_target;
273118334Speter  HARD_REG_SET scratch;
273218334Speter  struct resources set, needed;
273318334Speter
273418334Speter  /* Handle end of function.  */
273518334Speter  if (target == 0)
273618334Speter    {
273718334Speter      *res = end_of_function_needs;
273818334Speter      return;
273918334Speter    }
274018334Speter
274118334Speter  /* We have to assume memory is needed, but the CC isn't.  */
274218334Speter  res->memory = 1;
274318334Speter  res->volatil = res->unch_memory = 0;
274418334Speter  res->cc = 0;
274518334Speter
274618334Speter  /* See if we have computed this value already.  */
274718334Speter  for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
274818334Speter       tinfo; tinfo = tinfo->next)
274918334Speter    if (tinfo->uid == INSN_UID (target))
275018334Speter      break;
275118334Speter
275218334Speter  /* Start by getting the basic block number.  If we have saved information,
275318334Speter     we can get it from there unless the insn at the start of the basic block
275418334Speter     has been deleted.  */
275518334Speter  if (tinfo && tinfo->block != -1
275618334Speter      && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
275718334Speter    b = tinfo->block;
275818334Speter
275918334Speter  if (b == -1)
276018334Speter    b = find_basic_block (target);
276118334Speter
276218334Speter  if (tinfo)
276318334Speter    {
276418334Speter      /* If the information is up-to-date, use it.  Otherwise, we will
276518334Speter	 update it below.  */
276618334Speter      if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
276718334Speter	{
276818334Speter	  COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
276918334Speter	  return;
277018334Speter	}
277118334Speter    }
277218334Speter  else
277318334Speter    {
277418334Speter      /* Allocate a place to put our results and chain it into the
277518334Speter	 hash table.  */
277618334Speter      tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
277718334Speter      tinfo->uid = INSN_UID (target);
277818334Speter      tinfo->block = b;
277918334Speter      tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
278018334Speter      target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
278118334Speter    }
278218334Speter
278318334Speter  CLEAR_HARD_REG_SET (pending_dead_regs);
278418334Speter
278518334Speter  /* If we found a basic block, get the live registers from it and update
278618334Speter     them with anything set or killed between its start and the insn before
278718334Speter     TARGET.  Otherwise, we must assume everything is live.  */
278818334Speter  if (b != -1)
278918334Speter    {
279018334Speter      regset regs_live = basic_block_live_at_start[b];
279150397Sobrien      int j;
279218334Speter      int regno;
279318334Speter      rtx start_insn, stop_insn;
279418334Speter
279518334Speter      /* Compute hard regs live at start of block -- this is the real hard regs
279618334Speter	 marked live, plus live pseudo regs that have been renumbered to
279718334Speter	 hard regs.  */
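      /* For instance (hypothetical numbers): if pseudo 104 is live at the
	 block start and reg_renumber[104] == 8, with PSEUDO_REGNO_MODE
	 spanning two hard regs, the loop below marks hard regs 8 and 9
	 in current_live_regs.  */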
279818334Speter
279950397Sobrien      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
280018334Speter
280150397Sobrien      EXECUTE_IF_SET_IN_REG_SET
280250397Sobrien	(regs_live, FIRST_PSEUDO_REGISTER, i,
280350397Sobrien	 {
280450397Sobrien	   if ((regno = reg_renumber[i]) >= 0)
280550397Sobrien	     for (j = regno;
280650397Sobrien		  j < regno + HARD_REGNO_NREGS (regno,
280750397Sobrien						PSEUDO_REGNO_MODE (i));
280850397Sobrien		  j++)
280950397Sobrien	       SET_HARD_REG_BIT (current_live_regs, j);
281050397Sobrien	 });
281118334Speter
281218334Speter      /* Get starting and ending insn, handling the case where each might
281318334Speter	 be a SEQUENCE.  */
281418334Speter      start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
281518334Speter      stop_insn = target;
281618334Speter
281718334Speter      if (GET_CODE (start_insn) == INSN
281818334Speter	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
281918334Speter	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
282018334Speter
282118334Speter      if (GET_CODE (stop_insn) == INSN
282218334Speter	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
282318334Speter	stop_insn = next_insn (PREV_INSN (stop_insn));
282418334Speter
282518334Speter      for (insn = start_insn; insn != stop_insn;
282618334Speter	   insn = next_insn_no_annul (insn))
282718334Speter	{
282818334Speter	  rtx link;
282918334Speter	  rtx real_insn = insn;
283018334Speter
283118334Speter	  /* If this insn is from the target of a branch, it isn't going to
283218334Speter	     be used in the sequel.  If it is used in both cases, this
283318334Speter	     test will not be true.  */
283418334Speter	  if (INSN_FROM_TARGET_P (insn))
283518334Speter	    continue;
283618334Speter
283718334Speter	  /* If this insn is a USE made by update_block, we care about the
283818334Speter	     underlying insn.  */
283918334Speter	  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
284018334Speter	      && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
284118334Speter	      real_insn = XEXP (PATTERN (insn), 0);
284218334Speter
284318334Speter	  if (GET_CODE (real_insn) == CALL_INSN)
284418334Speter	    {
284518334Speter	      /* CALL clobbers all call-used regs that aren't fixed except
284618334Speter		 sp, ap, and fp.  Do this before setting the result of the
284718334Speter		 call live.  */
284818334Speter	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
284918334Speter		if (call_used_regs[i]
285018334Speter		    && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
285118334Speter		    && i != ARG_POINTER_REGNUM
285218334Speter#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
285318334Speter		    && i != HARD_FRAME_POINTER_REGNUM
285418334Speter#endif
285518334Speter#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
285618334Speter		    && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
285718334Speter#endif
285818334Speter#ifdef PIC_OFFSET_TABLE_REGNUM
285918334Speter		    && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
286018334Speter#endif
286118334Speter		    )
286218334Speter		  CLEAR_HARD_REG_BIT (current_live_regs, i);
286318334Speter
286418334Speter	      /* A CALL_INSN sets any global register live, since it may
286518334Speter		 have been modified by the call.  */
286618334Speter	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
286718334Speter		if (global_regs[i])
286818334Speter		  SET_HARD_REG_BIT (current_live_regs, i);
286918334Speter	    }
287018334Speter
287118334Speter	  /* Mark anything killed in an insn to be deadened at the next
287218334Speter	     label.  Ignore USE insns; the only REG_DEAD notes will be for
287318334Speter	     parameters.  But they might be early.  A CALL_INSN will usually
287418334Speter	     clobber registers used for parameters.  It isn't worth bothering
287518334Speter	     with the unlikely case when it won't.  */
287618334Speter	  if ((GET_CODE (real_insn) == INSN
287718334Speter	       && GET_CODE (PATTERN (real_insn)) != USE
287818334Speter	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
287918334Speter	      || GET_CODE (real_insn) == JUMP_INSN
288018334Speter	      || GET_CODE (real_insn) == CALL_INSN)
288118334Speter	    {
288218334Speter	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
288318334Speter		if (REG_NOTE_KIND (link) == REG_DEAD
288418334Speter		    && GET_CODE (XEXP (link, 0)) == REG
288518334Speter		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
288618334Speter		  {
288718334Speter		    int first_regno = REGNO (XEXP (link, 0));
288818334Speter		    int last_regno
288918334Speter		      = (first_regno
289018334Speter			 + HARD_REGNO_NREGS (first_regno,
289118334Speter					     GET_MODE (XEXP (link, 0))));
289218334Speter
289318334Speter		    for (i = first_regno; i < last_regno; i++)
289418334Speter		      SET_HARD_REG_BIT (pending_dead_regs, i);
289518334Speter		  }
289618334Speter
289718334Speter	      note_stores (PATTERN (real_insn), update_live_status);
289818334Speter
289918334Speter	      /* If any registers were unused after this insn, kill them.
290018334Speter		 These notes will always be accurate.  */
290118334Speter	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
290218334Speter		if (REG_NOTE_KIND (link) == REG_UNUSED
290318334Speter		    && GET_CODE (XEXP (link, 0)) == REG
290418334Speter		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
290518334Speter		  {
290618334Speter		    int first_regno = REGNO (XEXP (link, 0));
290718334Speter		    int last_regno
290818334Speter		      = (first_regno
290918334Speter			 + HARD_REGNO_NREGS (first_regno,
291018334Speter					     GET_MODE (XEXP (link, 0))));
291118334Speter
291218334Speter		    for (i = first_regno; i < last_regno; i++)
291318334Speter		      CLEAR_HARD_REG_BIT (current_live_regs, i);
291418334Speter		  }
291518334Speter	    }
291618334Speter
291718334Speter	  else if (GET_CODE (real_insn) == CODE_LABEL)
291818334Speter	    {
291918334Speter	      /* A label clobbers the pending dead registers since neither
292018334Speter		 reload nor jump will propagate a value across a label.  */
292118334Speter	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
292218334Speter	      CLEAR_HARD_REG_SET (pending_dead_regs);
292318334Speter	    }
292418334Speter
292518334Speter	  /* The beginning of the epilogue corresponds to the end of the
292618334Speter	     RTL chain when there are no epilogue insns.  Certain resources
292718334Speter	     are implicitly required at that point.  */
292818334Speter	  else if (GET_CODE (real_insn) == NOTE
292918334Speter 		   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
293018334Speter	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
293118334Speter	}
293218334Speter
293318334Speter      COPY_HARD_REG_SET (res->regs, current_live_regs);
293418334Speter      tinfo->block = b;
293518334Speter      tinfo->bb_tick = bb_ticks[b];
293618334Speter    }
293718334Speter  else
293818334Speter    /* We didn't find the start of a basic block.  Assume everything
293918334Speter       in use.  This should happen only extremely rarely.  */
294018334Speter    SET_HARD_REG_SET (res->regs);
294118334Speter
294218334Speter  CLEAR_RESOURCE (&set);
294318334Speter  CLEAR_RESOURCE (&needed);
294418334Speter
294550397Sobrien  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
294650397Sobrien					  set, needed);
294718334Speter
294818334Speter  /* If we hit an unconditional branch, we have another way of finding out
294918334Speter     what is live: we can see what is live at the branch target and include
295018334Speter     anything used but not set before the branch.  The only things that are
295150397Sobrien     live are those that are live using the above test and the test below.  */
295218334Speter
295350397Sobrien  if (jump_insn)
295418334Speter    {
295518334Speter      struct resources new_resources;
295618334Speter      rtx stop_insn = next_active_insn (jump_insn);
295718334Speter
295818334Speter      mark_target_live_regs (next_active_insn (jump_target), &new_resources);
295918334Speter      CLEAR_RESOURCE (&set);
296018334Speter      CLEAR_RESOURCE (&needed);
296118334Speter
296218334Speter      /* Include JUMP_INSN in the needed registers.  */
296318334Speter      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
296418334Speter	{
296518334Speter	  mark_referenced_resources (insn, &needed, 1);
296618334Speter
296718334Speter	  COPY_HARD_REG_SET (scratch, needed.regs);
296818334Speter	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
296918334Speter	  IOR_HARD_REG_SET (new_resources.regs, scratch);
297018334Speter
297118334Speter	  mark_set_resources (insn, &set, 0, 1);
297218334Speter	}
297318334Speter
297418334Speter      AND_HARD_REG_SET (res->regs, new_resources.regs);
297518334Speter    }
297618334Speter
297718334Speter  COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
297818334Speter}
297918334Speter
298018334Speter/* Scan a function looking for insns that need a delay slot and find insns to
298118334Speter   put into the delay slot.
298218334Speter
298318334Speter   NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
298418334Speter   as calls).  We do these first since we don't want jump insns (that are
298518334Speter   easier to fill) to get the only insns that could be used for non-jump insns.
298618334Speter   When it is zero, only try to fill JUMP_INSNs.
298718334Speter
298818334Speter   When slots are filled in this manner, the insns (including the
298918334Speter   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
299018334Speter   it is possible to tell whether a delay slot has really been filled
299118334Speter   or not.  `final' knows how to deal with this, by communicating
299218334Speter   through FINAL_SEQUENCE.  */
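/* Schematically, a filled slot is represented as (illustrative RTL):

	(insn (sequence [(call_insn ...)	; insn needing the slot
			 (insn ...)]))		; insn filling the slot

   the SEQUENCE wrapper is how `final' tells a really-filled slot from
   an empty one.  */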
299318334Speter
299418334Speterstatic void
299550397Sobrienfill_simple_delay_slots (non_jumps_p)
299618334Speter     int non_jumps_p;
299718334Speter{
299818334Speter  register rtx insn, pat, trial, next_trial;
299950397Sobrien  register int i;
300018334Speter  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
300118334Speter  struct resources needed, set;
300250397Sobrien  int slots_to_fill, slots_filled;
300318334Speter  rtx delay_list;
300418334Speter
300518334Speter  for (i = 0; i < num_unfilled_slots; i++)
300618334Speter    {
300718334Speter      int flags;
300818334Speter      /* Get the next insn to fill.  If it has already had any slots assigned,
300918334Speter	 we can't do anything with it.  Maybe we'll improve this later.  */
301018334Speter
301118334Speter      insn = unfilled_slots_base[i];
301218334Speter      if (insn == 0
301318334Speter	  || INSN_DELETED_P (insn)
301418334Speter	  || (GET_CODE (insn) == INSN
301518334Speter	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
301618334Speter	  || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
301718334Speter	  || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
301818334Speter	continue;
301918334Speter
302018334Speter      if (GET_CODE (insn) == JUMP_INSN)
302118334Speter	flags = get_jump_flags (insn, JUMP_LABEL (insn));
302218334Speter      else
302318334Speter	flags = get_jump_flags (insn, NULL_RTX);
302418334Speter      slots_to_fill = num_delay_slots (insn);
302550397Sobrien
302650397Sobrien      /* Some machine descriptions define instructions to have
302750397Sobrien	 delay slots only in certain circumstances which may depend on
302850397Sobrien	 nearby insns (which change due to reorg's actions).
302950397Sobrien
303050397Sobrien	 For example, the PA port normally has delay slots for unconditional
303150397Sobrien	 jumps.
303250397Sobrien
303350397Sobrien	 However, the PA port claims such jumps do not have a delay slot
303450397Sobrien	 if they are immediate successors of certain CALL_INSNs.  This
303550397Sobrien	 allows the port to favor filling the delay slot of the call with
303650397Sobrien	 the unconditional jump.  */
303718334Speter      if (slots_to_fill == 0)
303850397Sobrien	continue;
303918334Speter
304018334Speter      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
304118334Speter	 says how many.  After initialization, first try optimizing
304218334Speter
304318334Speter	 call _foo		call _foo
304418334Speter	 nop			add %o7,.-L1,%o7
304518334Speter	 b,a L1
304618334Speter	 nop
304718334Speter
304818334Speter	 If this case applies, the delay slot of the call is filled with
304918334Speter	 the unconditional jump.  This is done first to avoid having the
305018334Speter	 delay slot of the call filled in the backward scan.  Also, since
305118334Speter	 the unconditional jump is likely to also have a delay slot, that
305218334Speter	 insn must exist when it is subsequently scanned.
305318334Speter
305418334Speter	 This is tried on each insn with delay slots as some machines
305518334Speter	 have insns which perform calls, but are not represented as
305618334Speter	 CALL_INSNs.  */
305718334Speter
305818334Speter      slots_filled = 0;
305918334Speter      delay_list = 0;
306018334Speter
306118334Speter      if ((trial = next_active_insn (insn))
306218334Speter	  && GET_CODE (trial) == JUMP_INSN
306318334Speter	  && simplejump_p (trial)
306418334Speter	  && eligible_for_delay (insn, slots_filled, trial, flags)
306518334Speter	  && no_labels_between_p (insn, trial))
306618334Speter	{
306718334Speter	  rtx *tmp;
306818334Speter	  slots_filled++;
306918334Speter	  delay_list = add_to_delay_list (trial, delay_list);
307018334Speter
307118334Speter	  /* TRIAL may have had its delay slot filled, then unfilled.  When
307218334Speter	     the delay slot is unfilled, TRIAL is placed back on the unfilled
307318334Speter	     slots obstack.  Unfortunately, it is placed on the end of the
307418334Speter	     obstack, not in its original location.  Therefore, we must search
307518334Speter	     from entry i + 1 to the end of the unfilled slots obstack to
307618334Speter	     try and find TRIAL.  */
307718334Speter	  tmp = &unfilled_slots_base[i + 1];
307818334Speter	  while (*tmp != trial && tmp != unfilled_slots_next)
307918334Speter	    tmp++;
308018334Speter
308118334Speter	  /* Remove the unconditional jump from consideration for delay slot
308218334Speter	     filling and unthread it.   */
308318334Speter	  if (*tmp == trial)
308418334Speter	    *tmp = 0;
308518334Speter	  {
308618334Speter	    rtx next = NEXT_INSN (trial);
308718334Speter	    rtx prev = PREV_INSN (trial);
308818334Speter	    if (prev)
308918334Speter	      NEXT_INSN (prev) = next;
309018334Speter	    if (next)
309118334Speter	      PREV_INSN (next) = prev;
309218334Speter	  }
309318334Speter	}
309418334Speter
309518334Speter      /* Now, scan backwards from the insn to search for a potential
309618334Speter	 delay-slot candidate.  Stop searching when a label or jump is hit.
309718334Speter
309818334Speter	 For each candidate, if it is to go into the delay slot (moved
309918334Speter	 forward in execution sequence), it must not need or set any resources
310018334Speter	 that were set by later insns and must not set any resources that
310118334Speter	 are needed for those insns.
310218334Speter
310318334Speter	 The delay slot insn itself sets resources unless it is a call
310418334Speter	 (in which case the called routine, not the insn itself, is doing
310518334Speter	 the setting).  */
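      /* E.g. (hypothetical pseudo-assembly), scanning backwards from the
	 branch:

		add	%r1,%r2,%r3	->	b	L2
		b	L2			 { add	%r1,%r2,%r3 }

	 the add may move into the slot because nothing between it and the
	 branch sets or needs %r1-%r3.  */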
310618334Speter
310718334Speter      if (slots_filled < slots_to_fill)
310818334Speter	{
310918334Speter	  CLEAR_RESOURCE (&needed);
311018334Speter	  CLEAR_RESOURCE (&set);
311118334Speter	  mark_set_resources (insn, &set, 0, 0);
311218334Speter	  mark_referenced_resources (insn, &needed, 0);
311318334Speter
311418334Speter	  for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
311518334Speter	       trial = next_trial)
311618334Speter	    {
311718334Speter	      next_trial = prev_nonnote_insn (trial);
311818334Speter
311918334Speter	      /* This must be an INSN or CALL_INSN.  */
312018334Speter	      pat = PATTERN (trial);
312118334Speter
312218334Speter	      /* USE and CLOBBER at this level are just for flow; ignore them.  */
312318334Speter	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
312418334Speter		continue;
312518334Speter
312618334Speter	      /* Check for resource conflict first, to avoid unnecessary
312718334Speter		 splitting.  */
312818334Speter	      if (! insn_references_resource_p (trial, &set, 1)
312918334Speter		  && ! insn_sets_resource_p (trial, &set, 1)
313018334Speter		  && ! insn_sets_resource_p (trial, &needed, 1)
313118334Speter#ifdef HAVE_cc0
313218334Speter		  /* Can't separate set of cc0 from its use.  */
313318334Speter		  && ! (reg_mentioned_p (cc0_rtx, pat)
313418334Speter			&& ! sets_cc0_p (cc0_rtx, pat))
313518334Speter#endif
313618334Speter		  )
313718334Speter		{
313818334Speter		  trial = try_split (pat, trial, 1);
313918334Speter		  next_trial = prev_nonnote_insn (trial);
314018334Speter		  if (eligible_for_delay (insn, slots_filled, trial, flags))
314118334Speter		    {
314218334Speter		      /* In this case, we are searching backward, so if we
314318334Speter			 find insns to put on the delay list, we want
314418334Speter			 to put them at the head, rather than the
314518334Speter			 tail, of the list.  */
314618334Speter
314718334Speter		      update_reg_dead_notes (trial, insn);
314850397Sobrien		      delay_list = gen_rtx_INSN_LIST (VOIDmode,
314950397Sobrien						      trial, delay_list);
315018334Speter		      update_block (trial, trial);
315118334Speter		      delete_insn (trial);
315218334Speter		      if (slots_to_fill == ++slots_filled)
315318334Speter			break;
315418334Speter		      continue;
315518334Speter		    }
315618334Speter		}
315718334Speter
315818334Speter	      mark_set_resources (trial, &set, 0, 1);
315918334Speter	      mark_referenced_resources (trial, &needed, 1);
316018334Speter	    }
316118334Speter	}
316218334Speter
316318334Speter      /* If not all of the needed slots have been filled, we come here.  */
316418334Speter
316518334Speter      /* Try to optimize case of jumping around a single insn.  */
316618334Speter#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
316718334Speter      if (slots_filled != slots_to_fill
316818334Speter	  && delay_list == 0
316918334Speter	  && GET_CODE (insn) == JUMP_INSN
317018334Speter	  && (condjump_p (insn) || condjump_in_parallel_p (insn)))
317118334Speter	{
317218334Speter	  delay_list = optimize_skip (insn);
317318334Speter	  if (delay_list)
317418334Speter	    slots_filled += 1;
317518334Speter	}
317618334Speter#endif
317718334Speter
317818334Speter      /* Try to get insns from beyond the insn needing the delay slot.
317918334Speter	 These insns can neither set nor reference resources set in insns being
318018334Speter	 skipped, cannot set resources in the insn being skipped, and, if this
318118334Speter	 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
318218334Speter	 call might not return).
318318334Speter
318418334Speter	 There used to be code which continued past the target label if
318518334Speter	 we saw all uses of the target label.  This code did not work,
318618334Speter	 because it failed to account for some instructions which were
318718334Speter	 both annulled and marked as from the target.  This can happen as a
318818334Speter	 result of optimize_skip.  Since this code was redundant with
318918334Speter	 fill_eager_delay_slots anyway, it was just deleted.  */
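      /* As a sketch (hypothetical pseudo-assembly): for

		call	_bar
		add	%r1,%r2,%r3

	 the add can move forward into the call's slot provided it neither
	 uses nor sets resources of the call and cannot trap, since
	 MAYBE_NEVER is set because the call might not return.  */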
319018334Speter
319118334Speter      if (slots_filled != slots_to_fill
319218334Speter          && (GET_CODE (insn) != JUMP_INSN
319318334Speter	      || ((condjump_p (insn) || condjump_in_parallel_p (insn))
319418334Speter		   && ! simplejump_p (insn)
319518334Speter		   && JUMP_LABEL (insn) != 0)))
319618334Speter	{
319718334Speter	  rtx target = 0;
319818334Speter	  int maybe_never = 0;
319918334Speter	  struct resources needed_at_jump;
320018334Speter
320118334Speter	  CLEAR_RESOURCE (&needed);
320218334Speter	  CLEAR_RESOURCE (&set);
320318334Speter
320418334Speter	  if (GET_CODE (insn) == CALL_INSN)
320518334Speter	    {
320618334Speter	      mark_set_resources (insn, &set, 0, 1);
320718334Speter	      mark_referenced_resources (insn, &needed, 1);
320818334Speter	      maybe_never = 1;
320918334Speter	    }
321018334Speter	  else
321118334Speter	    {
321218334Speter	      mark_set_resources (insn, &set, 0, 1);
321318334Speter	      mark_referenced_resources (insn, &needed, 1);
321418334Speter	      if (GET_CODE (insn) == JUMP_INSN)
321518334Speter		target = JUMP_LABEL (insn);
321618334Speter	    }
321718334Speter
321818334Speter	  for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
321918334Speter	    {
322018334Speter	      rtx pat, trial_delay;
322118334Speter
322218334Speter	      next_trial = next_nonnote_insn (trial);
322318334Speter
322418334Speter	      if (GET_CODE (trial) == CODE_LABEL
322518334Speter		  || GET_CODE (trial) == BARRIER)
322618334Speter		break;
322718334Speter
322818334Speter	      /* We must have an INSN, JUMP_INSN, or CALL_INSN.  */
322918334Speter	      pat = PATTERN (trial);
323018334Speter
323118334Speter	      /* Stand-alone USE and CLOBBER are just for flow.  */
323218334Speter	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
323318334Speter		continue;
323418334Speter
323518334Speter	      /* If this already has filled delay slots, get the insn needing
323618334Speter		 the delay slots.  */
323718334Speter	      if (GET_CODE (pat) == SEQUENCE)
323818334Speter		trial_delay = XVECEXP (pat, 0, 0);
323918334Speter	      else
324018334Speter		trial_delay = trial;
324118334Speter
324218334Speter	      /* If this is a jump insn to our target, indicate that we have
324318334Speter		 seen another jump to it.  If we aren't handling a conditional
324418334Speter		 jump, stop our search. Otherwise, compute the needs at its
324518334Speter		 target and add them to NEEDED.  */
324618334Speter	      if (GET_CODE (trial_delay) == JUMP_INSN)
324718334Speter		{
324818334Speter		  if (target == 0)
324918334Speter		    break;
325018334Speter		  else if (JUMP_LABEL (trial_delay) != target)
325118334Speter		    {
325218334Speter		      mark_target_live_regs
325318334Speter			(next_active_insn (JUMP_LABEL (trial_delay)),
325418334Speter			 &needed_at_jump);
325518334Speter		      needed.memory |= needed_at_jump.memory;
325618334Speter		      needed.unch_memory |= needed_at_jump.unch_memory;
325718334Speter		      IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
325818334Speter		    }
325918334Speter		}
326018334Speter
326118334Speter	      /* See if we have a resource problem before we try to
326218334Speter		 split.   */
326318334Speter	      if (target == 0
326418334Speter		  && GET_CODE (pat) != SEQUENCE
326518334Speter		  && ! insn_references_resource_p (trial, &set, 1)
326618334Speter		  && ! insn_sets_resource_p (trial, &set, 1)
326718334Speter		  && ! insn_sets_resource_p (trial, &needed, 1)
326818334Speter#ifdef HAVE_cc0
326918334Speter		  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
327018334Speter#endif
327118334Speter		  && ! (maybe_never && may_trap_p (pat))
327218334Speter		  && (trial = try_split (pat, trial, 0))
327318334Speter		  && eligible_for_delay (insn, slots_filled, trial, flags))
327418334Speter		{
327518334Speter		  next_trial = next_nonnote_insn (trial);
327618334Speter		  delay_list = add_to_delay_list (trial, delay_list);
327718334Speter
327818334Speter#ifdef HAVE_cc0
327918334Speter		  if (reg_mentioned_p (cc0_rtx, pat))
328018334Speter		    link_cc0_insns (trial);
328118334Speter#endif
328218334Speter
328318334Speter		  delete_insn (trial);
328418334Speter		  if (slots_to_fill == ++slots_filled)
328518334Speter		    break;
328618334Speter		  continue;
328718334Speter		}
328818334Speter
328918334Speter	      mark_set_resources (trial, &set, 0, 1);
329018334Speter	      mark_referenced_resources (trial, &needed, 1);
329118334Speter
329218334Speter	      /* Ensure we don't put insns between the setting of cc and the
329318334Speter		 comparison by moving a setting of cc into an earlier delay
329418334Speter		 slot since these insns could clobber the condition code.  */
329518334Speter	      set.cc = 1;
329618334Speter
329718334Speter	      /* If this is a call or jump, we might not get here.  */
329818334Speter	      if (GET_CODE (trial_delay) == CALL_INSN
329918334Speter		  || GET_CODE (trial_delay) == JUMP_INSN)
330018334Speter		maybe_never = 1;
330118334Speter	    }
330218334Speter
330318334Speter	  /* If there are slots left to fill and our search was stopped by an
330418334Speter	     unconditional branch, try the insn at the branch target.  We can
330518334Speter	     redirect the branch if it works.
330618334Speter
330718334Speter	     Don't do this if the insn at the branch target is a branch.  */
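	  /* Schematically (hypothetical labels): if the scan stops at
	     "b L1" and L1's first active insn is X, a copy of X goes
	     into our delay slot and the branch is redirected to the
	     label just past X, so X is not executed twice on that
	     path.  */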
330818334Speter	  if (slots_to_fill != slots_filled
330918334Speter	      && trial
331018334Speter	      && GET_CODE (trial) == JUMP_INSN
331118334Speter	      && simplejump_p (trial)
331218334Speter	      && (target == 0 || JUMP_LABEL (trial) == target)
331318334Speter	      && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
331418334Speter	      && ! (GET_CODE (next_trial) == INSN
331518334Speter		    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
331618334Speter	      && GET_CODE (next_trial) != JUMP_INSN
331718334Speter	      && ! insn_references_resource_p (next_trial, &set, 1)
331818334Speter	      && ! insn_sets_resource_p (next_trial, &set, 1)
331918334Speter	      && ! insn_sets_resource_p (next_trial, &needed, 1)
332018334Speter#ifdef HAVE_cc0
332118334Speter	      && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
332218334Speter#endif
332318334Speter	      && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
332418334Speter	      && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
332518334Speter	      && eligible_for_delay (insn, slots_filled, next_trial, flags))
332618334Speter	    {
332718334Speter	      rtx new_label = next_active_insn (next_trial);
332818334Speter
332918334Speter	      if (new_label != 0)
333018334Speter		new_label = get_label_before (new_label);
333118334Speter	      else
333218334Speter		new_label = find_end_label ();
333318334Speter
333418334Speter	      delay_list
333518334Speter		= add_to_delay_list (copy_rtx (next_trial), delay_list);
333618334Speter	      slots_filled++;
333718334Speter	      reorg_redirect_jump (trial, new_label);
333818334Speter
333918334Speter	      /* If we merged because we both jumped to the same place,
334018334Speter		 redirect the original insn also.  */
334118334Speter	      if (target)
334218334Speter		reorg_redirect_jump (insn, new_label);
334318334Speter	    }
334418334Speter	}
334518334Speter
334650397Sobrien      /* If this is an unconditional jump, then try to get insns from the
334750397Sobrien	 target of the jump.  */
334850397Sobrien      if (GET_CODE (insn) == JUMP_INSN
334950397Sobrien	  && simplejump_p (insn)
335050397Sobrien	  && slots_filled != slots_to_fill)
335150397Sobrien	delay_list
335250397Sobrien	  = fill_slots_from_thread (insn, const_true_rtx,
335350397Sobrien				    next_active_insn (JUMP_LABEL (insn)),
335450397Sobrien				    NULL, 1, 1,
335550397Sobrien				    own_thread_p (JUMP_LABEL (insn),
335650397Sobrien						  JUMP_LABEL (insn), 0),
335750397Sobrien				    slots_to_fill, &slots_filled,
335850397Sobrien				    delay_list);
335950397Sobrien
336018334Speter      if (delay_list)
336118334Speter	unfilled_slots_base[i]
336250397Sobrien	  = emit_delay_sequence (insn, delay_list, slots_filled);
336318334Speter
336418334Speter      if (slots_to_fill == slots_filled)
336518334Speter	unfilled_slots_base[i] = 0;
336618334Speter
336718334Speter      note_delay_statistics (slots_filled, 0);
336818334Speter    }
336918334Speter
337018334Speter#ifdef DELAY_SLOTS_FOR_EPILOGUE
337118334Speter  /* See if the epilogue needs any delay slots.  Try to fill them if so.
337218334Speter     The only thing we can do is scan backwards from the end of the
337318334Speter     function.  If we did this in a previous pass, it is incorrect to do it
337418334Speter     again.  */
337518334Speter  if (current_function_epilogue_delay_list)
337618334Speter    return;
337718334Speter
337818334Speter  slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
337918334Speter  if (slots_to_fill == 0)
338018334Speter    return;
338118334Speter
338218334Speter  slots_filled = 0;
338318334Speter  CLEAR_RESOURCE (&set);
338418334Speter
338518334Speter  /* The frame pointer and stack pointer are needed at the beginning of
338618334Speter     the epilogue, so instructions setting them cannot be put in the
338718334Speter     epilogue delay slot.  However, everything else needed at function
338818334Speter     end is safe, so we don't want to use end_of_function_needs here.  */
338918334Speter  CLEAR_RESOURCE (&needed);
339018334Speter  if (frame_pointer_needed)
339118334Speter    {
339218334Speter      SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
339318334Speter#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
339418334Speter      SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
339518334Speter#endif
339618334Speter#ifdef EXIT_IGNORE_STACK
339718334Speter      if (! EXIT_IGNORE_STACK)
339818334Speter#endif
339918334Speter	SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
340018334Speter    }
340118334Speter  else
340218334Speter    SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
340318334Speter
340450397Sobrien#ifdef EPILOGUE_USES
340550397Sobrien  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
340650397Sobrien    {
340750397Sobrien      if (EPILOGUE_USES (i))
340850397Sobrien	SET_HARD_REG_BIT (needed.regs, i);
340950397Sobrien    }
341050397Sobrien#endif
341150397Sobrien
341218334Speter  for (trial = get_last_insn (); ! stop_search_p (trial, 1);
341318334Speter       trial = PREV_INSN (trial))
341418334Speter    {
341518334Speter      if (GET_CODE (trial) == NOTE)
341618334Speter	continue;
341718334Speter      pat = PATTERN (trial);
341818334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
341918334Speter	continue;
342018334Speter
342118334Speter      if (! insn_references_resource_p (trial, &set, 1)
342218334Speter	  && ! insn_sets_resource_p (trial, &needed, 1)
342318334Speter	  && ! insn_sets_resource_p (trial, &set, 1)
342418334Speter#ifdef HAVE_cc0
342518334Speter	  /* Don't want to mess with cc0 here.  */
342618334Speter	  && ! reg_mentioned_p (cc0_rtx, pat)
342718334Speter#endif
342818334Speter	  )
342918334Speter	{
343018334Speter	  trial = try_split (pat, trial, 1);
343118334Speter	  if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
343218334Speter	    {
343318334Speter	      /* Here as well we are searching backward, so put the
343418334Speter		 insns we find on the head of the list.  */
343518334Speter
343618334Speter	      current_function_epilogue_delay_list
343750397Sobrien		= gen_rtx_INSN_LIST (VOIDmode, trial,
343850397Sobrien				     current_function_epilogue_delay_list);
343918334Speter	      mark_referenced_resources (trial, &end_of_function_needs, 1);
344018334Speter	      update_block (trial, trial);
344118334Speter	      delete_insn (trial);
344218334Speter
344318334Speter	      /* Clear deleted bit so final.c will output the insn.  */
344418334Speter	      INSN_DELETED_P (trial) = 0;
344518334Speter
344618334Speter	      if (slots_to_fill == ++slots_filled)
344718334Speter		break;
344818334Speter	      continue;
344918334Speter	    }
345018334Speter	}
345118334Speter
345218334Speter      mark_set_resources (trial, &set, 0, 1);
345318334Speter      mark_referenced_resources (trial, &needed, 1);
345418334Speter    }
345518334Speter
345618334Speter  note_delay_statistics (slots_filled, 0);
345718334Speter#endif
345818334Speter}
345918334Speter
346018334Speter/* Try to find insns to place in delay slots.
346118334Speter
346218334Speter   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
346318334Speter   or is an unconditional branch if CONDITION is const_true_rtx.
346418334Speter   *PSLOTS_FILLED is updated with the number of slots that we have filled.
346518334Speter
346618334Speter   THREAD is a flow-of-control, either the insns to be executed if the
346718334Speter   branch is true or if the branch is false, THREAD_IF_TRUE says which.
346818334Speter
346918334Speter   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
347018334Speter   to see if any potential delay slot insns set things needed there.
347118334Speter
347218334Speter   LIKELY is non-zero if it is extremely likely that the branch will be
347318334Speter   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
347418334Speter   end of a loop back up to the top.
347518334Speter
347618334Speter   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
347718334Speter   thread.  I.e., it is the fallthrough code of our jump or the target of the
347818334Speter   jump when we are the only jump going there.
347918334Speter
348018334Speter   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
348118334Speter   case, we can only take insns from the head of the thread for our delay
348218334Speter   slot.  We then adjust the jump to point after the insns we have taken.  */
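/* For a conditional branch (hypothetical stream)

	beq	L1
	insnA			<- fallthrough thread
	...
   L1:	insnB			<- taken thread

   filling from the branch target passes the insns at L1 as THREAD with
   THREAD_IF_TRUE nonzero and the fallthrough code as OPPOSITE_THREAD;
   filling from the fallthrough swaps the two.  */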
348318334Speter
348418334Speterstatic rtx
348518334Speterfill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
348650397Sobrien			thread_if_true, own_thread,
348750397Sobrien			slots_to_fill, pslots_filled, delay_list)
348818334Speter     rtx insn;
348918334Speter     rtx condition;
349018334Speter     rtx thread, opposite_thread;
349118334Speter     int likely;
349218334Speter     int thread_if_true;
349350397Sobrien     int own_thread;
349418334Speter     int slots_to_fill, *pslots_filled;
349550397Sobrien     rtx delay_list;
349618334Speter{
349718334Speter  rtx new_thread;
349818334Speter  struct resources opposite_needed, set, needed;
349918334Speter  rtx trial;
350018334Speter  int lose = 0;
350118334Speter  int must_annul = 0;
350218334Speter  int flags;
350318334Speter
350418334Speter  /* Validate our arguments.  */
350518334Speter  if ((condition == const_true_rtx && ! thread_if_true)
350618334Speter      || (! own_thread && ! thread_if_true))
350718334Speter    abort ();
350818334Speter
350918334Speter  flags = get_jump_flags (insn, JUMP_LABEL (insn));
351018334Speter
351118334Speter  /* If our thread is the end of subroutine, we can't get any delay
351218334Speter     insns from that.  */
351318334Speter  if (thread == 0)
351450397Sobrien      return delay_list;
351518334Speter
351618334Speter  /* If this is an unconditional branch, nothing is needed at the
351718334Speter     opposite thread.  Otherwise, compute what is needed there.  */
351818334Speter  if (condition == const_true_rtx)
351918334Speter    CLEAR_RESOURCE (&opposite_needed);
352018334Speter  else
352118334Speter    mark_target_live_regs (opposite_thread, &opposite_needed);
352218334Speter
352318334Speter  /* If the insn at THREAD can be split, do it here to avoid having to
352418334Speter     update THREAD and NEW_THREAD if it is done in the loop below.  Also
352518334Speter     initialize NEW_THREAD.  */
352618334Speter
352718334Speter  new_thread = thread = try_split (PATTERN (thread), thread, 0);
352818334Speter
352918334Speter  /* Scan insns at THREAD.  We are looking for an insn that can be removed
353018334Speter     from THREAD (it neither sets nor references resources that were set
353118334Speter     ahead of it and it doesn't set anything needed by the insns ahead of
353218334Speter     it) and that either can be placed in an annulling insn or isn't
353318334Speter     needed at OPPOSITE_THREAD.  */
353418334Speter
353518334Speter  CLEAR_RESOURCE (&needed);
353618334Speter  CLEAR_RESOURCE (&set);
353718334Speter
353818334Speter  /* If we do not own this thread, we must stop as soon as we find
353918334Speter     something that we can't put in a delay slot, since all we can do
354018334Speter     is branch into THREAD at a later point.  Therefore, labels stop
354118334Speter     the search if this is not the `true' thread.  */
354218334Speter
354318334Speter  for (trial = thread;
354418334Speter       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
354518334Speter       trial = next_nonnote_insn (trial))
354618334Speter    {
354718334Speter      rtx pat, old_trial;
354818334Speter
354918334Speter      /* If we have passed a label, we no longer own this thread.  */
355018334Speter      if (GET_CODE (trial) == CODE_LABEL)
355118334Speter	{
355218334Speter	  own_thread = 0;
355318334Speter	  continue;
355418334Speter	}
355518334Speter
355618334Speter      pat = PATTERN (trial);
355718334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
355818334Speter	continue;
355918334Speter
356018334Speter      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
356118334Speter	 don't separate or copy insns that set and use CC0.  */
356218334Speter      if (! insn_references_resource_p (trial, &set, 1)
356318334Speter	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
	  && ! (reg_mentioned_p (cc0_rtx, pat)
		&& (! own_thread || ! sets_cc0_p (pat)))
#endif
	  )
	{
	  rtx prior_insn;

	  /* If TRIAL is redundant with some insn before INSN, we don't
	     actually need to add it to the delay list; we can merely pretend
	     we did.  */
	  if ((prior_insn = redundant_insn (trial, insn, delay_list)))
	    {
	      fix_reg_dead_note (prior_insn, insn);
	      if (own_thread)
		{
		  update_block (trial, thread);
		  if (trial == thread)
		    {
		      thread = next_active_insn (thread);
		      if (new_thread == trial)
			new_thread = thread;
		    }

		  delete_insn (trial);
		}
	      else
		{
		  update_reg_unused_notes (prior_insn, trial);
		  new_thread = next_active_insn (trial);
		}

	      continue;
	    }

	  /* There are two ways we can win:  If TRIAL doesn't set anything
	     needed at the opposite thread and can't trap, or if it can
	     go into an annulled delay slot.  */
	  if (condition == const_true_rtx
	      || (! insn_sets_resource_p (trial, &opposite_needed, 1)
		  && ! may_trap_p (pat)))
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      if (thread == old_trial)
		thread = trial;
	      pat = PATTERN (trial);
	      if (eligible_for_delay (insn, *pslots_filled, trial, flags))
		goto winner;
	    }
	  else if (0
#ifdef ANNUL_IFTRUE_SLOTS
		   || ! thread_if_true
#endif
#ifdef ANNUL_IFFALSE_SLOTS
		   || thread_if_true
#endif
		   )
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      if (thread == old_trial)
		thread = trial;
	      pat = PATTERN (trial);
	      if ((thread_if_true
		   ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
		   : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
		{
		  rtx temp;

		  must_annul = 1;
		winner:

#ifdef HAVE_cc0
		  if (reg_mentioned_p (cc0_rtx, pat))
		    link_cc0_insns (trial);
#endif

		  /* If we own this thread, delete the insn.  If this is the
		     destination of a branch, show that a basic block status
		     may have been updated.  In any case, mark the new
		     starting point of this thread.  */
		  if (own_thread)
		    {
		      update_block (trial, thread);
		      if (trial == thread)
			{
			  thread = next_active_insn (thread);
			  if (new_thread == trial)
			    new_thread = thread;
			}
		      delete_insn (trial);
		    }
		  else
		    new_thread = next_active_insn (trial);

		  temp = own_thread ? trial : copy_rtx (trial);
		  if (thread_if_true)
		    INSN_FROM_TARGET_P (temp) = 1;

		  delay_list = add_to_delay_list (temp, delay_list);

		  mark_set_resources (trial, &opposite_needed, 0, 1);

		  if (slots_to_fill == ++(*pslots_filled))
		    {
		      /* Even though we have filled all the slots, we
			 may be branching to a location that has a
			 redundant insn.  Skip any if so.  */
		      while (new_thread && ! own_thread
			     && ! insn_sets_resource_p (new_thread, &set, 1)
			     && ! insn_sets_resource_p (new_thread, &needed, 1)
			     && ! insn_references_resource_p (new_thread,
							      &set, 1)
			     && (prior_insn
				 = redundant_insn (new_thread, insn,
						   delay_list)))
			{
			  /* We know we do not own the thread, so no need
			     to call update_block and delete_insn.  */
			  fix_reg_dead_note (prior_insn, insn);
			  update_reg_unused_notes (prior_insn, new_thread);
			  new_thread = next_active_insn (new_thread);
			}
		      break;
		    }

		  continue;
		}
	    }
	}

      /* This insn can't go into a delay slot.  */
      lose = 1;
      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);

      /* Ensure we don't put insns between the setting of cc and the comparison
	 by moving a setting of cc into an earlier delay slot since these insns
	 could clobber the condition code.  */
      set.cc = 1;

      /* If this insn is a register-register copy and the next insn has
	 a use of our destination, change it to use our source.  That way,
	 it will become a candidate for our delay slot the next time
	 through this loop.  This case occurs commonly in loops that
	 scan a list.

	 We could check for more complex cases than those tested below,
	 but it doesn't seem worth it.  It might also be a good idea to try
	 to swap the two insns.  That might do better.

	 We can't do this if the next insn modifies our destination, because
	 that would make the replacement into the insn invalid.  We also can't
	 do this if it modifies our source, because it might be an earlyclobber
	 operand.  This latter test also prevents updating the contents of
	 a PRE_INC.  */

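      /* Illustrative sketch (editor's note; the RTL below is hypothetical,
	 not taken from any particular port):  given

	     (set (reg 2) (reg 3))		;; TRIAL: reg-reg copy
	     (set (reg 4) (mem (reg 2)))	;; NEXT: uses the copy's dest

	 rewriting NEXT to use (reg 3) removes its dependence on the copy,
	 so NEXT may qualify for the delay slot on a later iteration.  */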
      if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
	  && GET_CODE (SET_SRC (pat)) == REG
	  && GET_CODE (SET_DEST (pat)) == REG)
	{
	  rtx next = next_nonnote_insn (trial);

	  if (next && GET_CODE (next) == INSN
	      && GET_CODE (PATTERN (next)) != USE
	      && ! reg_set_p (SET_DEST (pat), next)
	      && ! reg_set_p (SET_SRC (pat), next)
	      && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
	    validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
	}
    }

  /* If we stopped on a branch insn that has delay slots, see if we can
     steal some of the insns in those slots.  */
  if (trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
    {
      /* If this is the `true' thread, we will want to follow the jump,
	 so we can only do this if we have taken everything up to here.  */
      if (thread_if_true && trial == new_thread
	  && ! insn_references_resource_p (XVECEXP (PATTERN (trial), 0, 0),
					   &opposite_needed, 0))
	delay_list
	  = steal_delay_list_from_target (insn, condition, PATTERN (trial),
					  delay_list, &set, &needed,
					  &opposite_needed, slots_to_fill,
					  pslots_filled, &must_annul,
					  &new_thread);
      else if (! thread_if_true)
	delay_list
	  = steal_delay_list_from_fallthrough (insn, condition,
					       PATTERN (trial),
					       delay_list, &set, &needed,
					       &opposite_needed, slots_to_fill,
					       pslots_filled, &must_annul);
    }

  /* If we haven't found anything for this delay slot and it is very
     likely that the branch will be taken, see if the insn at our target
     increments or decrements a register with an increment that does not
     depend on the destination register.  If so, try to place the opposite
     arithmetic insn after the jump insn and put the arithmetic insn in the
     delay slot.  If we can't do this, return.  */
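  /* For instance (editor's sketch; the code below is hypothetical):  if
     the branch target begins with "x = x + 4", that insn can be placed
     in the delay slot, where it executes whether or not the branch is
     taken, and a compensating "x = x - 4" is emitted after the jump so
     the fall-through path still sees the original value of x.  */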
  if (delay_list == 0 && likely && new_thread
      && GET_CODE (new_thread) == INSN
      && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
      && asm_noperands (PATTERN (new_thread)) < 0)
    {
      rtx pat = PATTERN (new_thread);
      rtx dest;
      rtx src;

      trial = new_thread;
      pat = PATTERN (trial);

      if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
	  || ! eligible_for_delay (insn, 0, trial, flags))
	return 0;

      dest = SET_DEST (pat), src = SET_SRC (pat);
      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
	  && rtx_equal_p (XEXP (src, 0), dest)
	  && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
	{
	  rtx other = XEXP (src, 1);
	  rtx new_arith;
	  rtx ninsn;

	  /* If this is a constant adjustment, use the same code with
	     the negated constant.  Otherwise, reverse the sense of the
	     arithmetic.  */
	  if (GET_CODE (other) == CONST_INT)
	    new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
					negate_rtx (GET_MODE (src), other));
	  else
	    new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
					GET_MODE (src), dest, other);

	  ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
				   insn);

	  if (recog_memoized (ninsn) < 0
	      || (insn_extract (ninsn),
		  ! constrain_operands (INSN_CODE (ninsn), 1)))
	    {
	      delete_insn (ninsn);
	      return 0;
	    }

	  if (own_thread)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		{
		  thread = next_active_insn (thread);
		  if (new_thread == trial)
		    new_thread = thread;
		}
	      delete_insn (trial);
	    }
	  else
	    new_thread = next_active_insn (trial);

	  ninsn = own_thread ? trial : copy_rtx (trial);
	  if (thread_if_true)
	    INSN_FROM_TARGET_P (ninsn) = 1;

	  delay_list = add_to_delay_list (ninsn, NULL_RTX);
	  (*pslots_filled)++;
	}
    }

  if (delay_list && must_annul)
    INSN_ANNULLED_BRANCH_P (insn) = 1;

  /* If we are to branch into the middle of this thread, find an appropriate
     label or make a new one if none, and redirect INSN to it.  If we hit the
     end of the function, use the end-of-function label.  */
  if (new_thread != thread)
    {
      rtx label;

      if (! thread_if_true)
	abort ();

      if (new_thread && GET_CODE (new_thread) == JUMP_INSN
	  && (simplejump_p (new_thread)
	      || GET_CODE (PATTERN (new_thread)) == RETURN)
	  && redirect_with_delay_list_safe_p (insn,
					      JUMP_LABEL (new_thread),
					      delay_list))
	new_thread = follow_jumps (JUMP_LABEL (new_thread));

      if (new_thread == 0)
	label = find_end_label ();
      else if (GET_CODE (new_thread) == CODE_LABEL)
	label = new_thread;
      else
	label = get_label_before (new_thread);

      reorg_redirect_jump (insn, label);
    }

  return delay_list;
}

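/* Editor's aside (not part of the original source): a minimal sketch in
   plain C of the annulled-slot semantics exploited above, assuming a
   SPARC-style branch whose delay-slot insn is annulled when the branch
   is not taken.  Kept under #if 0 so it cannot affect the build.  */
#if 0
static int
annulled_slot_model (int cond, int x)
{
  if (cond)
    {
      x += 1;	/* delay-slot insn, executed only when the branch is taken */
      goto taken;
    }
  /* Branch not taken: the slot insn was annulled, so X is unchanged.  */
  return x;
 taken:
  return x * 2;
}
#endif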
/* Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

   Here we only try to schedule jump insns, moving insns from either
   the target or the following insns into the delay slot.  If annulling is
   supported, we are likely to be able to do this.  Otherwise, we can do
   this only if it is safe.  */

static void
fill_eager_delay_slots ()
{
  register rtx insn;
  register int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      rtx condition;
      rtx target_label, insn_at_target, fallthrough_insn;
      rtx delay_list = 0;
      int own_target;
      int own_fallthrough;
      int prediction, slots_to_fill, slots_filled;

      insn = unfilled_slots_base[i];
      if (insn == 0
	  || INSN_DELETED_P (insn)
	  || GET_CODE (insn) != JUMP_INSN
	  || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
	continue;

      slots_to_fill = num_delay_slots (insn);
      /* Some machine descriptions define instructions as having delay
	 slots only in certain circumstances, which may depend on nearby
	 insns (and those insns change due to reorg's actions).

	 For example, the PA port normally has delay slots for unconditional
	 jumps.

	 However, the PA port claims such jumps do not have a delay slot
	 if they are immediate successors of certain CALL_INSNs.  This
	 allows the port to favor filling the delay slot of the call with
	 the unconditional jump.  */
      if (slots_to_fill == 0)
        continue;

      slots_filled = 0;
      target_label = JUMP_LABEL (insn);
      condition = get_branch_condition (insn, target_label);

      if (condition == 0)
	continue;

      /* Get the next active fallthrough and target insns and see if we own
	 them.  Then see whether the branch is likely true.  We don't need
	 to do a lot of this for unconditional branches.  */

      insn_at_target = next_active_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
	{
	  own_fallthrough = 0;
	  fallthrough_insn = 0;
	  prediction = 2;
	}
      else
	{
	  fallthrough_insn = next_active_insn (insn);
	  own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
	  prediction = mostly_true_jump (insn, condition);
	}

      /* If this insn is expected to branch, first try to get insns from our
	 target, then our fallthrough insns.  If it is not expected to branch,
	 try the other order.  */

      if (prediction > 0)
	{
	  delay_list
	    = fill_slots_from_thread (insn, condition, insn_at_target,
				      fallthrough_insn, prediction == 2, 1,
				      own_target,
				      slots_to_fill, &slots_filled, delay_list);

	  if (delay_list == 0 && own_fallthrough)
	    {
	      /* Even though we didn't find anything for delay slots,
		 we might have found a redundant insn which we deleted
		 from the thread that was filled.  So we have to recompute
		 the next insn at the target.  */
	      target_label = JUMP_LABEL (insn);
	      insn_at_target = next_active_insn (target_label);

	      delay_list
		= fill_slots_from_thread (insn, condition, fallthrough_insn,
					  insn_at_target, 0, 0,
					  own_fallthrough,
					  slots_to_fill, &slots_filled,
					  delay_list);
	    }
	}
      else
	{
	  if (own_fallthrough)
	    delay_list
	      = fill_slots_from_thread (insn, condition, fallthrough_insn,
					insn_at_target, 0, 0,
					own_fallthrough,
					slots_to_fill, &slots_filled,
					delay_list);

	  if (delay_list == 0)
	    delay_list
	      = fill_slots_from_thread (insn, condition, insn_at_target,
					next_active_insn (insn), 0, 1,
					own_target,
					slots_to_fill, &slots_filled,
					delay_list);
	}

      if (delay_list)
	unfilled_slots_base[i]
	  = emit_delay_sequence (insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
	unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}

/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */
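/* Jump threading, for example (editor's sketch with made-up labels):
   when a jump targets label L1 and the first active insn at L1 is itself
   an unconditional jump to L2, redirecting the original jump straight to
   L2 avoids executing the intermediate jump at run time.  */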

static void
relax_delay_slots (first)
     rtx first;
{
  register rtx insn, next, pat;
  register rtx trial, delay_insn, target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx other;

      next = next_active_insn (insn);

      /* If this is a jump insn, see if it now jumps to a jump, jumps to
	 the next insn, or jumps to a label that is not the last of a
	 group of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && (target_label = JUMP_LABEL (insn)) != 0)
	{
	  target_label = follow_jumps (target_label);
	  target_label = prev_label (next_active_insn (target_label));

	  if (target_label == 0)
	    target_label = find_end_label ();

	  if (next_active_insn (target_label) == next
	      && ! condjump_in_parallel_p (insn))
	    {
	      delete_jump (insn);
	      continue;
	    }

	  if (target_label != JUMP_LABEL (insn))
	    reorg_redirect_jump (insn, target_label);

	  /* See if this jump branches around an unconditional jump.
	     If so, invert this jump and point it to the target of the
	     second jump.  */
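	  /* For example (editor's sketch, pseudo-assembly):

		 beq  L1		;; this jump
		 jmp  L2		;; jump being branched around
	      L1:

	     becomes "bne L2" followed directly by L1, saving a jump.  */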
	  if (next && GET_CODE (next) == JUMP_INSN
	      && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	      && next_active_insn (target_label) == next_active_insn (next)
	      && no_labels_between_p (insn, next))
	    {
	      rtx label = JUMP_LABEL (next);

	      /* Be careful how we do this to avoid deleting code or
		 labels that are momentarily dead.  See similar optimization
		 in jump.c.

		 We also need to ensure we properly handle the case when
		 invert_jump fails.  */

	      ++LABEL_NUSES (target_label);
	      if (label)
		++LABEL_NUSES (label);

	      if (invert_jump (insn, label))
		{
		  delete_insn (next);
		  next = insn;
		}

	      if (label)
		--LABEL_NUSES (label);

	      if (--LABEL_NUSES (target_label) == 0)
		delete_insn (target_label);

	      continue;
	    }
	}

      /* If this is an unconditional jump and the previous insn is a
	 conditional jump, try reversing the condition of the previous
	 insn and swapping our targets.  The next pass might be able to
	 fill the slots.

	 Don't do this if we expect the conditional branch to be true, because
	 we would then be making the more common case longer.  */
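      /* For example (editor's sketch, pseudo-assembly):  "beq L1" followed
	 by "jmp L2" becomes "bne L2" followed by "jmp L1", which preserves
	 the semantics while giving the next pass another chance to fill
	 the slots of the rewritten jumps.  */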

      if (GET_CODE (insn) == JUMP_INSN
	  && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
	  && (other = prev_active_insn (insn)) != 0
	  && (condjump_p (other) || condjump_in_parallel_p (other))
	  && no_labels_between_p (other, insn)
	  && 0 > mostly_true_jump (other,
				   get_branch_condition (other,
							 JUMP_LABEL (other))))
	{
	  rtx other_target = JUMP_LABEL (other);
	  target_label = JUMP_LABEL (insn);

	  /* Increment the count of OTHER_TARGET, so it doesn't get deleted
	     as we move the label.  */
	  if (other_target)
	    ++LABEL_NUSES (other_target);

	  if (invert_jump (other, target_label))
	    reorg_redirect_jump (insn, other_target);

	  if (other_target)
	    --LABEL_NUSES (other_target);
	}

      /* Now look only at cases where we have filled a delay slot.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE)
	continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
	 previous insn.  Remove it from the delay slot if so; then set up
	 to reprocess this insn.  */
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
	{
	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
	  next = prev_active_insn (next);
	  continue;
	}

      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
		|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
	continue;

      target_label = JUMP_LABEL (delay_insn);

      if (target_label)
	{
	  /* If this jump goes to another unconditional jump, thread it, but
	     don't convert a jump into a RETURN here.  */
	  trial = follow_jumps (target_label);
	  /* We use next_real_insn instead of next_active_insn, so that
	     the special USE insns emitted by reorg won't be ignored.
	     If they are ignored, then they will get deleted if target_label
	     is now unreachable, and that would cause mark_target_live_regs
	     to fail.  */
	  trial = prev_label (next_real_insn (trial));
	  if (trial == 0 && target_label != 0)
	    trial = find_end_label ();

	  if (trial != target_label
	      && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
	    {
	      reorg_redirect_jump (delay_insn, trial);
	      target_label = trial;
	    }

	  /* If the first insn at TARGET_LABEL is redundant with a previous
	     insn, redirect the jump to the following insn and process again.  */
	  trial = next_active_insn (target_label);
	  if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
	      && redundant_insn (trial, insn, 0))
	    {
	      rtx tmp;

	      /* Figure out where to emit the special USE insn so we don't
		 later incorrectly compute register live/death info.  */
	      tmp = next_active_insn (trial);
	      if (tmp == 0)
		tmp = find_end_label ();

	      /* Insert the special USE insn and update dataflow info.  */
	      update_block (trial, tmp);

	      /* Now emit a label before the special USE insn, and
		 redirect our jump to the new label.  */
	      target_label = get_label_before (PREV_INSN (tmp));
	      reorg_redirect_jump (delay_insn, target_label);
	      next = insn;
	      continue;
	    }

	  /* Similarly, if it is an unconditional jump with one insn in its
	     delay list and that insn is redundant, thread the jump.  */
	  if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
	      && XVECLEN (PATTERN (trial), 0) == 2
	      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
	      && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
		  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
	      && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
	    {
	      target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
	      if (target_label == 0)
		target_label = find_end_label ();

	      if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
						    insn))
		{
		  reorg_redirect_jump (delay_insn, target_label);
		  next = insn;
		  continue;
		}
	    }
	}

      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}

      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}

      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	      continue;
	    }
	}

      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}

#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */
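/* For instance (editor's sketch, pseudo-assembly): a filled "jmp .Lend"
   whose slot holds a frame-restore insn can become "ret" with the same
   insn in its slot, provided the restore is also valid in a RETURN's
   delay slot.  (".Lend" is a made-up label for illustration.)  */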

static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the best
	 RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}

      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has more or an equal number of slots and the contents
	 of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

      if (i == XVECLEN (pat, 0))
	continue;

      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
#endif

/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
				       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
	 TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);

  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We must do this now: up to this point the destinations of jumps can
     still be moved around and changed, but beyond this point they cannot.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
					    GEN_INT (pred_flags),
					    REG_NOTES (insn));
    }
}
#endif /* DELAY_SLOTS */