/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where
   a conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insns that set and use CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".   */
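
/* A rough sketch of the representation used throughout this file: once a
   delay slot has been filled, the branch and its slot insns are wrapped
   into a single insn whose pattern is a SEQUENCE, e.g.

	(insn (sequence [(jump_insn ...)	;; the branch itself
			 (insn ...)]))		;; insn in the delay slot

   The routines below either build such SEQUENCEs (emit_delay_sequence)
   or take them apart again (delete_from_delay_slot).  */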

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "expr.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "insn-flags.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"

#ifdef DELAY_SLOTS

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
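
/* A minimal sketch of why the macros above re-derive their addresses on
   every use, with plain realloc standing in for the obstack (the `toy_'
   names are hypothetical and the code is illustrative only):  */
#if 0
static rtx *toy_slots;			/* analogous to unfilled_slots_base */
static int toy_n_slots, toy_cap;

static void
toy_record_slot (insn)
     rtx insn;
{
  if (toy_n_slots == toy_cap)
    /* Growing the buffer may move it; any pointer saved before this call
       is now stale.  The obstack can move in the same way, which is why
       the base and next-free pointers must be recomputed at each use.  */
    toy_slots = (rtx *) xrealloc ((char *) toy_slots,
				  (toy_cap = toy_cap * 2 + 1) * sizeof (rtx));
  toy_slots[toy_n_slots++] = insn;
}
#endif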

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p		PROTO((rtx, int));
static int resource_conflicts_p		PROTO((struct resources *,
					       struct resources *));
static int insn_references_resource_p	PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p		PROTO((rtx, struct resources *, int));
static rtx find_end_label		PROTO((void));
static rtx emit_delay_sequence		PROTO((rtx, rtx, int));
static rtx add_to_delay_list		PROTO((rtx, rtx));
static rtx delete_from_delay_slot	PROTO((rtx));
static void delete_scheduled_jump	PROTO((rtx));
static void note_delay_statistics	PROTO((int, int));
static rtx optimize_skip		PROTO((rtx));
static int get_jump_flags		PROTO((rtx, rtx));
static int rare_destination		PROTO((rtx));
static int mostly_true_jump		PROTO((rtx, rtx));
static rtx get_branch_condition		PROTO((rtx, rtx));
static int condition_dominates_p	PROTO((rtx, rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
static int check_annul_list_true_false	PROTO ((int, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns	PROTO((rtx, rtx));
static rtx redundant_insn		PROTO((rtx, rtx, rtx));
static int own_thread_p			PROTO((rtx, rtx, int));
static void update_block		PROTO((rtx, rtx));
static int reorg_redirect_jump		PROTO((rtx, rtx));
static void update_reg_dead_notes	PROTO((rtx, rtx));
static void fix_reg_dead_note		PROTO((rtx, rtx));
static void update_reg_unused_notes	PROTO((rtx, rtx));
static void fill_simple_delay_slots	PROTO((int));
static rtx fill_slots_from_thread	PROTO((rtx, rtx, rtx, rtx, int, int,
					       int, int, int *, rtx));
static void fill_eager_delay_slots	PROTO((void));
static void relax_delay_slots		PROTO((rtx));
static void make_return_insns		PROTO((rtx));

/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}

/* Return TRUE if any resources are marked in both RES1 and RES2 or if
   either resource set contains a volatile memory reference.  Otherwise,
   return FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, resources used
   by the called routine are included as well.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.   CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}

/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

static rtx
emit_delay_sequence (insn, list, length)
     rtx insn;
     rtx list;
     int length;
{
  register int i = 1;
  register rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}
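
/* Schematically: if INSN sat between insns A and B and insns D1 and D2 go
   into its delay slots, then after emit_delay_sequence

	NEXT_INSN (A) == SEQ_INSN   and   NEXT_INSN (SEQ_INSN) == B,

   while inside the SEQUENCE the copy of INSN, D1 and D2 are chained so
   that NEXT_INSN of D2, the last slot insn, is also B.  */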

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
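
/* The recursion above appends at the tail, so e.g. adding I3 to the list
   (I1 I2) yields (I1 I2 I3).  Building a list of N slot insns therefore
   costs O(N^2) pointer walks, which is harmless since N is bounded by the
   number of delay slots of a single insn.  */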

/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}

/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

        (orig)		 (skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform a similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif

/*  Encode and return branch direction and prediction information for
    INSN assuming it will jump to LABEL.

    Unconditional branches return no direction information and
    are predicted as very likely taken.  */
77418334Speterstatic int
77518334Speterget_jump_flags (insn, label)
77618334Speter     rtx insn, label;
77718334Speter{
77818334Speter  int flags;
77918334Speter
78018334Speter  /* get_jump_flags can be passed any insn with delay slots, these may
78118334Speter     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
78218334Speter     direction information, and only if they are conditional jumps.
78318334Speter
78418334Speter     If LABEL is zero, then there is no way to determine the branch
78518334Speter     direction.  */
78618334Speter  if (GET_CODE (insn) == JUMP_INSN
78718334Speter      && (condjump_p (insn) || condjump_in_parallel_p (insn))
78818334Speter      && INSN_UID (insn) <= max_uid
78918334Speter      && label != 0
79018334Speter      && INSN_UID (label) <= max_uid)
79118334Speter    flags
79218334Speter      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
79318334Speter	 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
79418334Speter  /* No valid direction information.  */
79518334Speter  else
79618334Speter    flags = 0;
79718334Speter
79818334Speter  /* If insn is a conditional branch call mostly_true_jump to get
  /* If INSN is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	  case 2:
	    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	    break;
	  case 1:
	    flags |= ATTR_FLAG_likely;
	    break;
	  case 0:
	    flags |= ATTR_FLAG_unlikely;
	    break;
	  case -1:
	    flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	    break;

	  default:
	    abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
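
/* So, for example, a conditional branch to a label later in the insn
   stream that is predicted taken comes back as
   (ATTR_FLAG_forward | ATTR_FLAG_likely).  */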

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if INSN will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;
	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  if (flag_branch_probabilities)
    {
      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
      if (note)
	{
	  int prob = XINT (note, 0);

	  if (prob >= REG_BR_PROB_BASE * 9 / 10)
	    return 2;
	  else if (prob >= REG_BR_PROB_BASE / 2)
	    return 1;
	  else if (prob >= REG_BR_PROB_BASE / 10)
	    return 0;
	  else
	    return -1;
	}
    }
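  /* (With the customary REG_BR_PROB_BASE of 10000, a note value of 9500
     lands in the first bucket above and yields 2, "very likely taken";
     a value of 4000 yields 0, "somewhat unlikely".)  */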

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;

    default:
      break;
    }

  /* Predict backward branches usually taken, forward branches usually not.
     If we don't know whether this is forward or backward, assume the branch
     will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
			   GET_MODE (XEXP (src, 0)),
			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}

/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
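
/* For example, a condition (LT x 0) dominates a branch on (LE x 0):
   whenever the first condition holds, the second does too, so the
   dominated branch is sure to be taken.  */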

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (annul_true_p, delay_list)
     int annul_true_p;
     rtx delay_list;
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
        {
          rtx trial = XEXP (temp, 0);

          if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
        }
    }

  return 1;
}

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).   SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

115052284Sobrien     We can not steal the delay list if one of the instructions in the
115152284Sobrien     current delay_list modifies the condition codes and the jump in the
115252284Sobrien     sequence is a conditional jump.  In that case we can not redirect
115352284Sobrien     the jump, since the condition codes will affect which way the jump
115452284Sobrien     in the sequence goes.  */
123552284Sobrien
123652284Sobrien  CLEAR_RESOURCE (&cc_set);
123752284Sobrien  for (temp = delay_list; temp; temp = XEXP (temp, 1))
123852284Sobrien    {
123952284Sobrien      rtx trial = XEXP (temp, 0);
124052284Sobrien
124152284Sobrien      mark_set_resources (trial, &cc_set, 0, 1);
124252284Sobrien      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, 0))
124352284Sobrien        return delay_list;
124452284Sobrien    }
124552284Sobrien
124618334Speter  if (XVECLEN (seq, 0) - 1 > slots_remaining
124718334Speter      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
124818334Speter      || ! single_set (XVECEXP (seq, 0, 0)))
124918334Speter    return delay_list;
125018334Speter
125118334Speter  for (i = 1; i < XVECLEN (seq, 0); i++)
125218334Speter    {
125318334Speter      rtx trial = XVECEXP (seq, 0, i);
125418334Speter      int flags;
125518334Speter
125618334Speter      if (insn_references_resource_p (trial, sets, 0)
125718334Speter	  || insn_sets_resource_p (trial, needed, 0)
125818334Speter	  || insn_sets_resource_p (trial, sets, 0)
125918334Speter#ifdef HAVE_cc0
126018334Speter	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
126118334Speter	     delay list.  */
126218334Speter	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
126318334Speter#endif
126418334Speter	  /* If TRIAL is from the fallthrough code of an annulled branch insn
126518334Speter	     in SEQ, we cannot use it.  */
126618334Speter	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
126718334Speter	      && ! INSN_FROM_TARGET_P (trial)))
126818334Speter	return delay_list;
126918334Speter
127018334Speter      /* If this insn was already done (usually in a previous delay slot),
127118334Speter	 pretend we put it in our delay slot.  */
127218334Speter      if (redundant_insn (trial, insn, new_delay_list))
127318334Speter	continue;
127418334Speter
127518334Speter      /* We will end up re-vectoring this branch, so compute flags
127618334Speter	 based on jumping to the new label.  */
127718334Speter      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
127818334Speter
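      /* Try TRIAL as an ordinary delay-slot insn first.  Failing that,
         consider annulling INSN; that is only valid if every insn already
         accepted has the same annulling status, here "from the target"
         (annul if false).  */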
127918334Speter      if (! must_annul
128018334Speter	  && ((condition == const_true_rtx
128118334Speter	       || (! insn_sets_resource_p (trial, other_needed, 0)
128218334Speter		   && ! may_trap_p (PATTERN (trial)))))
128318334Speter	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
128452284Sobrien	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
128552284Sobrien	     && (must_annul = 1,
128652284Sobrien	         check_annul_list_true_false (0, delay_list)
128752284Sobrien	         && check_annul_list_true_false (0, new_delay_list)
128852284Sobrien	         && eligible_for_annul_false (insn, total_slots_filled,
128952284Sobrien					      trial, flags)))
129018334Speter	{
129152284Sobrien	  if (must_annul)
129252284Sobrien	    used_annul = 1;
129318334Speter	  temp = copy_rtx (trial);
129418334Speter	  INSN_FROM_TARGET_P (temp) = 1;
129518334Speter	  new_delay_list = add_to_delay_list (temp, new_delay_list);
129618334Speter	  total_slots_filled++;
129718334Speter
129818334Speter	  if (--slots_remaining == 0)
129918334Speter	    break;
130018334Speter	}
130118334Speter      else
130218334Speter	return delay_list;
130318334Speter    }
130418334Speter
130518334Speter  /* Show the place to which we will be branching.  */
130618334Speter  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
130718334Speter
130818334Speter  /* Add any new insns to the delay list and update the count of the
130918334Speter     number of slots filled.  */
131018334Speter  *pslots_filled = total_slots_filled;
131152284Sobrien  if (used_annul)
131252284Sobrien    *pannul_p = 1;
131318334Speter
131418334Speter  if (delay_list == 0)
131518334Speter    return new_delay_list;
131618334Speter
131718334Speter  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
131818334Speter    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
131918334Speter
132018334Speter  return delay_list;
132118334Speter}
132218334Speter
132318334Speter/* Similar to steal_delay_list_from_target except that SEQ is on the
132418334Speter   fallthrough path of INSN.  Here we only do something if the delay insn
132518334Speter   of SEQ is an unconditional branch.  In that case we steal its delay slot
132618334Speter   for INSN since unconditional branches are much easier to fill.  */
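/* (Sketch: if the fall-through SEQUENCE is "(b L2) (add %r1,1)", the
   "add" can be pulled up into INSN's delay slot, and the simple jump it
   came from is easier to refill later.)  */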
132718334Speter
132818334Speterstatic rtx
132918334Spetersteal_delay_list_from_fallthrough (insn, condition, seq,
133018334Speter				   delay_list, sets, needed, other_needed,
133118334Speter				   slots_to_fill, pslots_filled, pannul_p)
133218334Speter     rtx insn, condition;
133318334Speter     rtx seq;
133418334Speter     rtx delay_list;
133518334Speter     struct resources *sets, *needed, *other_needed;
133618334Speter     int slots_to_fill;
133718334Speter     int *pslots_filled;
133818334Speter     int *pannul_p;
133918334Speter{
134018334Speter  int i;
134118334Speter  int flags;
134252284Sobrien  int must_annul = *pannul_p;
134352284Sobrien  int used_annul = 0;
134418334Speter
134518334Speter  flags = get_jump_flags (insn, JUMP_LABEL (insn));
134618334Speter
134718334Speter  /* We can't do anything if SEQ's delay insn isn't an
134818334Speter     unconditional branch.  */
134918334Speter
135018334Speter  if (! simplejump_p (XVECEXP (seq, 0, 0))
135118334Speter      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
135218334Speter    return delay_list;
135318334Speter
135418334Speter  for (i = 1; i < XVECLEN (seq, 0); i++)
135518334Speter    {
135618334Speter      rtx trial = XVECEXP (seq, 0, i);
135718334Speter
135818334Speter      /* If TRIAL sets CC0, stealing it will move it too far from the use
135918334Speter	 of CC0.  */
136018334Speter      if (insn_references_resource_p (trial, sets, 0)
136118334Speter	  || insn_sets_resource_p (trial, needed, 0)
136218334Speter	  || insn_sets_resource_p (trial, sets, 0)
136318334Speter#ifdef HAVE_cc0
136418334Speter	  || sets_cc0_p (PATTERN (trial))
136518334Speter#endif
136618334Speter	  )
136718334Speter
136818334Speter	break;
136918334Speter
137018334Speter      /* If this insn was already done, we don't need it.  */
137118334Speter      if (redundant_insn (trial, insn, delay_list))
137218334Speter	{
137318334Speter	  delete_from_delay_slot (trial);
137418334Speter	  continue;
137518334Speter	}
137618334Speter
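      /* As above, prefer an ordinary delay slot; otherwise try annulling
         INSN, which puts TRIAL in an "annul if true" slot since it comes
         from the fall-through path.  */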
137752284Sobrien      if (! must_annul
137818334Speter	  && ((condition == const_true_rtx
137918334Speter	       || (! insn_sets_resource_p (trial, other_needed, 0)
138018334Speter		   && ! may_trap_p (PATTERN (trial)))))
138118334Speter	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
138252284Sobrien	  : (must_annul || delay_list == NULL) && (must_annul = 1,
138352284Sobrien	     check_annul_list_true_false (1, delay_list)
138452284Sobrien	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
138518334Speter	{
138652284Sobrien	  if (must_annul)
138752284Sobrien	    used_annul = 1;
138818334Speter	  delete_from_delay_slot (trial);
138918334Speter	  delay_list = add_to_delay_list (trial, delay_list);
139018334Speter
139118334Speter	  if (++(*pslots_filled) == slots_to_fill)
139218334Speter	    break;
139318334Speter	}
139418334Speter      else
139518334Speter	break;
139618334Speter    }
139718334Speter
139852284Sobrien  if (used_annul)
139952284Sobrien    *pannul_p = 1;
140018334Speter  return delay_list;
140118334Speter}
140252284Sobrien
140318334Speter
140418334Speter/* Try merging insns starting at THREAD which match exactly the insns in
140518334Speter   INSN's delay list.
140618334Speter
140718334Speter   If all insns were matched and the insn was previously annulling, the
140818334Speter   annul bit will be cleared.
140918334Speter
141018334Speter   For each insn that is merged, if the branch is or will be non-annulling,
141118334Speter   we delete the merged insn.  */
141218334Speter
141318334Speterstatic void
141418334Spetertry_merge_delay_insns (insn, thread)
141518334Speter     rtx insn, thread;
141618334Speter{
141718334Speter  rtx trial, next_trial;
141818334Speter  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
141918334Speter  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
142018334Speter  int slot_number = 1;
142118334Speter  int num_slots = XVECLEN (PATTERN (insn), 0);
142218334Speter  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
142318334Speter  struct resources set, needed;
142418334Speter  rtx merged_insns = 0;
142518334Speter  int i;
142618334Speter  int flags;
142718334Speter
142818334Speter  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
142918334Speter
143018334Speter  CLEAR_RESOURCE (&needed);
143118334Speter  CLEAR_RESOURCE (&set);
143218334Speter
143318334Speter  /* If this is not an annulling branch, take into account anything needed in
143452284Sobrien     INSN's delay slot.  This prevents two increments from being incorrectly
143518334Speter     folded into one.  If we are annulling, this would be the correct
143618334Speter   thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH,
143718334Speter     will essentially disable this optimization.  This method is somewhat of
143818334Speter     a kludge, but I don't see a better way.)  */
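  /* (E.g. if INSN's delay slot and the thread each contain "add %r1,1",
     matching them without this bookkeeping would wrongly merge the two
     increments into one.)  */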
143918334Speter  if (! annul_p)
144052284Sobrien    for (i = 1 ; i < num_slots ; i++)
144152284Sobrien      if (XVECEXP (PATTERN (insn), 0, i))
144252284Sobrien        mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);
144318334Speter
144418334Speter  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
144518334Speter    {
144618334Speter      rtx pat = PATTERN (trial);
144718334Speter      rtx oldtrial = trial;
144818334Speter
144918334Speter      next_trial = next_nonnote_insn (trial);
145018334Speter
145118334Speter      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
145218334Speter      if (GET_CODE (trial) == INSN
145318334Speter	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
145418334Speter	continue;
145518334Speter
145618334Speter      if (GET_CODE (next_to_match) == GET_CODE (trial)
145718334Speter#ifdef HAVE_cc0
145818334Speter	  /* We can't share an insn that sets cc0.  */
145918334Speter	  && ! sets_cc0_p (pat)
146018334Speter#endif
146118334Speter	  && ! insn_references_resource_p (trial, &set, 1)
146218334Speter	  && ! insn_sets_resource_p (trial, &set, 1)
146318334Speter	  && ! insn_sets_resource_p (trial, &needed, 1)
146418334Speter	  && (trial = try_split (pat, trial, 0)) != 0
146518334Speter	  /* Update next_trial, in case try_split succeeded.  */
146618334Speter	  && (next_trial = next_nonnote_insn (trial))
146718334Speter	  /* Likewise THREAD.  */
146818334Speter	  && (thread = oldtrial == thread ? trial : thread)
146918334Speter	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
147018334Speter	  /* We have to test this condition in case the annul condition is
147118334Speter	     different from (and less restrictive than) the non-annulling one.  */
147218334Speter	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
147318334Speter	{
147418334Speter
147518334Speter	  if (! annul_p)
147618334Speter	    {
147718334Speter	      update_block (trial, thread);
147818334Speter	      if (trial == thread)
147918334Speter		thread = next_active_insn (thread);
148018334Speter
148118334Speter	      delete_insn (trial);
148218334Speter	      INSN_FROM_TARGET_P (next_to_match) = 0;
148318334Speter	    }
148418334Speter	  else
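	    /* Remember the merged insn.  The VOIDmode mode bit (as opposed
	       to SImode below) records that TRIAL sits in the insn stream
	       rather than in a delay slot, so it is later removed with
	       delete_insn.  */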
148550397Sobrien	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
148618334Speter
148718334Speter	  if (++slot_number == num_slots)
148818334Speter	    break;
148918334Speter
149018334Speter	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
149118334Speter	}
149218334Speter
149318334Speter      mark_set_resources (trial, &set, 0, 1);
149418334Speter      mark_referenced_resources (trial, &needed, 1);
149518334Speter    }
149618334Speter
149718334Speter  /* See if we stopped on a filled insn.  If we did, try to see if its
149818334Speter     delay slots match.  */
149918334Speter  if (slot_number != num_slots
150018334Speter      && trial && GET_CODE (trial) == INSN
150118334Speter      && GET_CODE (PATTERN (trial)) == SEQUENCE
150218334Speter      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
150318334Speter    {
150418334Speter      rtx pat = PATTERN (trial);
150518334Speter      rtx filled_insn = XVECEXP (pat, 0, 0);
150618334Speter
150718334Speter      /* Account for resources set/needed by the filled insn.  */
150818334Speter      mark_set_resources (filled_insn, &set, 0, 1);
150918334Speter      mark_referenced_resources (filled_insn, &needed, 1);
151018334Speter
151118334Speter      for (i = 1; i < XVECLEN (pat, 0); i++)
151218334Speter	{
151318334Speter	  rtx dtrial = XVECEXP (pat, 0, i);
151418334Speter
151518334Speter	  if (! insn_references_resource_p (dtrial, &set, 1)
151618334Speter	      && ! insn_sets_resource_p (dtrial, &set, 1)
151718334Speter	      && ! insn_sets_resource_p (dtrial, &needed, 1)
151818334Speter#ifdef HAVE_cc0
151918334Speter	      && ! sets_cc0_p (PATTERN (dtrial))
152018334Speter#endif
152118334Speter	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
152218334Speter	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
152318334Speter	    {
152418334Speter	      if (! annul_p)
152518334Speter		{
152652284Sobrien		  rtx new;
152752284Sobrien
152818334Speter		  update_block (dtrial, thread);
152952284Sobrien		  new = delete_from_delay_slot (dtrial);
153052284Sobrien	          if (INSN_DELETED_P (thread))
153152284Sobrien		    thread = new;
153218334Speter		  INSN_FROM_TARGET_P (next_to_match) = 0;
153318334Speter		}
153418334Speter	      else
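		/* SImode marks DTRIAL as living in a delay slot, so the
		   cleanup loop below uses delete_from_delay_slot on it
		   rather than delete_insn.  */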
153550397Sobrien		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
153650397Sobrien						  merged_insns);
153718334Speter
153818334Speter	      if (++slot_number == num_slots)
153918334Speter		break;
154018334Speter
154118334Speter	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
154218334Speter	    }
154352284Sobrien	  else
154452284Sobrien	    {
154552284Sobrien	      /* Keep track of the set/referenced resources for the delay
154652284Sobrien		 slots of any trial insns we encounter.  */
154752284Sobrien              mark_set_resources (dtrial, &set, 0, 1);
154852284Sobrien              mark_referenced_resources (dtrial, &needed, 1);
154952284Sobrien	    }
155018334Speter	}
155118334Speter    }
155218334Speter
155318334Speter  /* If all insns in the delay slot have been matched and we were previously
155418334Speter     annulling the branch, we need not do so any more.  In that case delete
155550397Sobrien     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
155618334Speter     insn in the delay list so that we know that it isn't only being used
155718334Speter     at the target.  */
155818334Speter  if (slot_number == num_slots && annul_p)
155918334Speter    {
156018334Speter      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
156118334Speter	{
156218334Speter	  if (GET_MODE (merged_insns) == SImode)
156318334Speter	    {
156452284Sobrien	      rtx new;
156552284Sobrien
156618334Speter	      update_block (XEXP (merged_insns, 0), thread);
156752284Sobrien	      new = delete_from_delay_slot (XEXP (merged_insns, 0));
156852284Sobrien	      if (INSN_DELETED_P (thread))
156952284Sobrien		thread = new;
157018334Speter	    }
157118334Speter	  else
157218334Speter	    {
157318334Speter	      update_block (XEXP (merged_insns, 0), thread);
157418334Speter	      delete_insn (XEXP (merged_insns, 0));
157518334Speter	    }
157618334Speter	}
157718334Speter
157818334Speter      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
157918334Speter
158018334Speter      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
158118334Speter	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
158218334Speter    }
158318334Speter}
158418334Speter
158518334Speter/* See if INSN is redundant with an insn in front of TARGET.  Often this
158618334Speter   is called when INSN is a candidate for a delay slot of TARGET.
158718334Speter   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
158818334Speter   of INSN.  Often INSN will be redundant with an insn in a delay slot of
158918334Speter   some previous insn.  This happens when we have a series of branches to the
159018334Speter   same label; in that case the first insn at the target might want to go
159118334Speter   into each of the delay slots.
159218334Speter
159318334Speter   If we are not careful, this routine can take up a significant fraction
159418334Speter   of the total compilation time (4%), but only wins rarely.  Hence we
159518334Speter   speed this routine up by making two passes.  The first pass goes back
159618334Speter   until it hits a label and sees if it finds an insn with an identical
159718334Speter   pattern.  Only in this (relatively rare) event does it check for
159818334Speter   data conflicts.
159918334Speter
160018334Speter   We do not split insns we encounter.  This could cause us not to find a
160118334Speter   redundant insn, but the cost of splitting seems greater than the possible
160218334Speter   gain in rare cases.  */
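/* (Sketch: several branches all target label L, whose first insn is
   "ld [%r2],%r1".  Once that load has filled one branch's delay slot,
   the same load is found redundant when filling the next branch's.)  */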
160318334Speter
160418334Speterstatic rtx
160518334Speterredundant_insn (insn, target, delay_list)
160618334Speter     rtx insn;
160718334Speter     rtx target;
160818334Speter     rtx delay_list;
160918334Speter{
161018334Speter  rtx target_main = target;
161118334Speter  rtx ipat = PATTERN (insn);
161218334Speter  rtx trial, pat;
161318334Speter  struct resources needed, set;
161418334Speter  int i;
161518334Speter
161650397Sobrien  /* If INSN has any REG_UNUSED notes, it can't match anything since we
161750397Sobrien     are allowed to not actually assign to such a register.  */
161850397Sobrien  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
161950397Sobrien    return 0;
162050397Sobrien
162118334Speter  /* Scan backwards looking for a match.  */
162218334Speter  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
162318334Speter    {
162418334Speter      if (GET_CODE (trial) == CODE_LABEL)
162518334Speter	return 0;
162618334Speter
162718334Speter      if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
162818334Speter	continue;
162918334Speter
163018334Speter      pat = PATTERN (trial);
163118334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
163218334Speter	continue;
163318334Speter
163418334Speter      if (GET_CODE (pat) == SEQUENCE)
163518334Speter	{
163618334Speter	  /* Stop for a CALL and its delay slots because it is difficult to
163718334Speter	     track its resource needs correctly.  */
163818334Speter	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
163918334Speter	    return 0;
164018334Speter
164118334Speter	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
164218334Speter	     slots because it is difficult to track its resource needs
164318334Speter	     correctly.  */
164418334Speter
164518334Speter#ifdef INSN_SETS_ARE_DELAYED
164618334Speter	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
164718334Speter	    return 0;
164818334Speter#endif
164918334Speter
165018334Speter#ifdef INSN_REFERENCES_ARE_DELAYED
165118334Speter	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
165218334Speter	    return 0;
165318334Speter#endif
165418334Speter
165518334Speter	  /* See if any of the insns in the delay slot match, updating
165618334Speter	     resource requirements as we go.  */
165718334Speter	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
165818334Speter	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
165950397Sobrien		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
166050397Sobrien		&& ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
166118334Speter	      break;
166218334Speter
166318334Speter	  /* If we found a match, exit this loop early.  */
166418334Speter	  if (i > 0)
166518334Speter	    break;
166618334Speter	}
166718334Speter
166850397Sobrien      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
166950397Sobrien	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
167018334Speter	break;
167118334Speter    }
167218334Speter
167318334Speter  /* If we didn't find an insn that matches, return 0.  */
167418334Speter  if (trial == 0)
167518334Speter    return 0;
167618334Speter
167718334Speter  /* See what resources this insn sets and needs.  If they overlap, or
167818334Speter     if this insn references CC0, it can't be redundant.  */
167918334Speter
168018334Speter  CLEAR_RESOURCE (&needed);
168118334Speter  CLEAR_RESOURCE (&set);
168218334Speter  mark_set_resources (insn, &set, 0, 1);
168318334Speter  mark_referenced_resources (insn, &needed, 1);
168418334Speter
168518334Speter  /* If TARGET is a SEQUENCE, get the main insn.  */
168618334Speter  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
168718334Speter    target_main = XVECEXP (PATTERN (target), 0, 0);
168818334Speter
168918334Speter  if (resource_conflicts_p (&needed, &set)
169018334Speter#ifdef HAVE_cc0
169118334Speter      || reg_mentioned_p (cc0_rtx, ipat)
169218334Speter#endif
169318334Speter      /* The insn requiring the delay may not set anything needed or set by
169418334Speter	 INSN.  */
169518334Speter      || insn_sets_resource_p (target_main, &needed, 1)
169618334Speter      || insn_sets_resource_p (target_main, &set, 1))
169718334Speter    return 0;
169818334Speter
169918334Speter  /* Insns we pass may not set either NEEDED or SET, so merge them for
170018334Speter     simpler tests.  */
170118334Speter  needed.memory |= set.memory;
170218334Speter  needed.unch_memory |= set.unch_memory;
170318334Speter  IOR_HARD_REG_SET (needed.regs, set.regs);
170418334Speter
170518334Speter  /* This insn isn't redundant if it conflicts with an insn that either is
170618334Speter     or will be in a delay slot of TARGET.  */
170718334Speter
170818334Speter  while (delay_list)
170918334Speter    {
171018334Speter      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
171118334Speter	return 0;
171218334Speter      delay_list = XEXP (delay_list, 1);
171318334Speter    }
171418334Speter
171518334Speter  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
171618334Speter    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
171718334Speter      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
171818334Speter	return 0;
171918334Speter
172018334Speter  /* Scan backwards until we reach a label or an insn that uses something
172118334Speter     INSN sets, or sets something INSN uses or sets.  */
172218334Speter
172318334Speter  for (trial = PREV_INSN (target);
172418334Speter       trial && GET_CODE (trial) != CODE_LABEL;
172518334Speter       trial = PREV_INSN (trial))
172618334Speter    {
172718334Speter      if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
172818334Speter	  && GET_CODE (trial) != JUMP_INSN)
172918334Speter	continue;
173018334Speter
173118334Speter      pat = PATTERN (trial);
173218334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
173318334Speter	continue;
173418334Speter
173518334Speter      if (GET_CODE (pat) == SEQUENCE)
173618334Speter	{
173718334Speter	  /* If this is a CALL_INSN and its delay slots, it is hard to track
173818334Speter	     the resource needs properly, so give up.  */
173918334Speter	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
174018334Speter	    return 0;
174118334Speter
174250397Sobrien	  /* If this is an INSN or JUMP_INSN with delayed effects, it
174318334Speter	     is hard to track the resource needs properly, so give up.  */
174418334Speter
174518334Speter#ifdef INSN_SETS_ARE_DELAYED
174618334Speter	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
174718334Speter	    return 0;
174818334Speter#endif
174918334Speter
175018334Speter#ifdef INSN_REFERENCES_ARE_DELAYED
175118334Speter	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
175218334Speter	    return 0;
175318334Speter#endif
175418334Speter
175518334Speter	  /* See if any of the insns in the delay slot match, updating
175618334Speter	     resource requirements as we go.  */
175718334Speter	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
175818334Speter	    {
175918334Speter	      rtx candidate = XVECEXP (pat, 0, i);
176018334Speter
176118334Speter	      /* If an insn will be annulled if the branch is false, it isn't
176218334Speter		 considered as a possible duplicate insn.  */
176318334Speter	      if (rtx_equal_p (PATTERN (candidate), ipat)
176418334Speter		  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
176518334Speter			&& INSN_FROM_TARGET_P (candidate)))
176618334Speter		{
176718334Speter		  /* Show that this insn will be used in the sequel.  */
176818334Speter		  INSN_FROM_TARGET_P (candidate) = 0;
176918334Speter		  return candidate;
177018334Speter		}
177118334Speter
177218334Speter	      /* Unless this is an annulled insn from the target of a branch,
177318334Speter		 we must stop if it sets anything needed or set by INSN.  */
177418334Speter	      if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
177518334Speter		   || ! INSN_FROM_TARGET_P (candidate))
177618334Speter		  && insn_sets_resource_p (candidate, &needed, 1))
177718334Speter		return 0;
177818334Speter	    }
177918334Speter
178018334Speter
178118334Speter	  /* If the insn requiring the delay slot conflicts with INSN, we
178218334Speter	     must stop.  */
178318334Speter	  if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
178418334Speter	    return 0;
178518334Speter	}
178618334Speter      else
178718334Speter	{
178818334Speter	  /* See if TRIAL is the same as INSN.  */
178918334Speter	  pat = PATTERN (trial);
179018334Speter	  if (rtx_equal_p (pat, ipat))
179118334Speter	    return trial;
179218334Speter
179318334Speter	  /* Can't go any further if TRIAL conflicts with INSN.  */
179418334Speter	  if (insn_sets_resource_p (trial, &needed, 1))
179518334Speter	    return 0;
179618334Speter	}
179718334Speter    }
179818334Speter
179918334Speter  return 0;
180018334Speter}
180118334Speter
180218334Speter/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
180318334Speter   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
180418334Speter   is non-zero, we are allowed to fall into this thread; otherwise, we are
180518334Speter   not.
180618334Speter
180718334Speter   If LABEL is used more than once, or we pass a label other than LABEL
180818334Speter   before finding an active insn, we do not own this thread.  */
180918334Speter
181018334Speterstatic int
181118334Speterown_thread_p (thread, label, allow_fallthrough)
181218334Speter     rtx thread;
181318334Speter     rtx label;
181418334Speter     int allow_fallthrough;
181518334Speter{
181618334Speter  rtx active_insn;
181718334Speter  rtx insn;
181818334Speter
181918334Speter  /* We don't own the function end.  */
182018334Speter  if (thread == 0)
182118334Speter    return 0;
182218334Speter
182318334Speter  /* Get the first active insn, or THREAD, if it is an active insn.  */
182418334Speter  active_insn = next_active_insn (PREV_INSN (thread));
182518334Speter
182618334Speter  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
182718334Speter    if (GET_CODE (insn) == CODE_LABEL
182818334Speter	&& (insn != label || LABEL_NUSES (insn) != 1))
182918334Speter      return 0;
183018334Speter
183118334Speter  if (allow_fallthrough)
183218334Speter    return 1;
183318334Speter
183418334Speter  /* Ensure that we reach a BARRIER before any insn or label.  */
183518334Speter  for (insn = prev_nonnote_insn (thread);
183618334Speter       insn == 0 || GET_CODE (insn) != BARRIER;
183718334Speter       insn = prev_nonnote_insn (insn))
183818334Speter    if (insn == 0
183918334Speter	|| GET_CODE (insn) == CODE_LABEL
184018334Speter	|| (GET_CODE (insn) == INSN
184118334Speter	    && GET_CODE (PATTERN (insn)) != USE
184218334Speter	    && GET_CODE (PATTERN (insn)) != CLOBBER))
184318334Speter      return 0;
184418334Speter
184518334Speter  return 1;
184618334Speter}
184718334Speter
184818334Speter/* Called when INSN is being moved from a location near the target of a jump.
184918334Speter   We leave a marker of the form (use (INSN)) immediately in front
185018334Speter   of WHERE for mark_target_live_regs.  These markers will be deleted when
185118334Speter   reorg finishes.
185218334Speter
185318334Speter   We used to try to update the live status of registers if WHERE is at
185418334Speter   the start of a basic block, but that can't work since we may remove a
185518334Speter   BARRIER in relax_delay_slots.  */
185618334Speter
185718334Speterstatic void
185818334Speterupdate_block (insn, where)
185918334Speter     rtx insn;
186018334Speter     rtx where;
186118334Speter{
186218334Speter  /* Ignore if this was in a delay slot and it came from the target of
186318334Speter     a branch.  */
186418334Speter  if (INSN_FROM_TARGET_P (insn))
186518334Speter    return;
186618334Speter
186750397Sobrien  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
186818334Speter
186918334Speter  /* INSN might be making a value live in a block where it didn't use to
187018334Speter     be.  So recompute liveness information for this block.  */
187118334Speter
187252284Sobrien  incr_ticks_for_insn (insn);
187318334Speter}
187418334Speter
187518334Speter/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
187618334Speter   the basic block containing the jump.  */
187718334Speter
187818334Speterstatic int
187918334Speterreorg_redirect_jump (jump, nlabel)
188018334Speter     rtx jump;
188118334Speter     rtx nlabel;
188218334Speter{
188352284Sobrien  incr_ticks_for_insn (jump);
188418334Speter  return redirect_jump (jump, nlabel);
188518334Speter}
188618334Speter
188718334Speter/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
188818334Speter   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
188918334Speter   that reference values used in INSN.  If we find one, then we move the
189018334Speter   REG_DEAD note to INSN.
189118334Speter
189218334Speter   This is needed to handle the case where a later insn (after INSN) has a
189318334Speter   REG_DEAD note for a register used by INSN, and this later insn subsequently
189418334Speter   gets moved before a CODE_LABEL because it is a redundant insn.  In this
189518334Speter   case, mark_target_live_regs may be confused into thinking the register
189618334Speter   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */
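/* (Sketch: INSN uses %r3, and an insn between INSN and DELAYED_INSN
   carries a REG_DEAD note for %r3.  Moving the note onto INSN keeps
   mark_target_live_regs from presuming %r3 dead too early.)  */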
189718334Speter
189818334Speterstatic void
189918334Speterupdate_reg_dead_notes (insn, delayed_insn)
190018334Speter     rtx insn, delayed_insn;
190118334Speter{
190218334Speter  rtx p, link, next;
190318334Speter
190418334Speter  for (p = next_nonnote_insn (insn); p != delayed_insn;
190518334Speter       p = next_nonnote_insn (p))
190618334Speter    for (link = REG_NOTES (p); link; link = next)
190718334Speter      {
190818334Speter	next = XEXP (link, 1);
190918334Speter
191018334Speter	if (REG_NOTE_KIND (link) != REG_DEAD
191118334Speter	    || GET_CODE (XEXP (link, 0)) != REG)
191218334Speter	  continue;
191318334Speter
191418334Speter	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
191518334Speter	  {
191618334Speter	    /* Move the REG_DEAD note from P to INSN.  */
191718334Speter	    remove_note (p, link);
191818334Speter	    XEXP (link, 1) = REG_NOTES (insn);
191918334Speter	    REG_NOTES (insn) = link;
192018334Speter	  }
192118334Speter      }
192218334Speter}
192318334Speter
192450397Sobrien/* Called when an insn redundant with start_insn is deleted.  If there
192550397Sobrien   is a REG_DEAD note for the target of start_insn between start_insn
192650397Sobrien   and stop_insn, then the REG_DEAD note needs to be deleted since the
192750397Sobrien   value no longer dies there.
192850397Sobrien
192950397Sobrien   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
193050397Sobrien   confused into thinking the register is dead.  */
193150397Sobrien
193250397Sobrienstatic void
193350397Sobrienfix_reg_dead_note (start_insn, stop_insn)
193450397Sobrien     rtx start_insn, stop_insn;
193550397Sobrien{
193650397Sobrien  rtx p, link, next;
193750397Sobrien
193850397Sobrien  for (p = next_nonnote_insn (start_insn); p != stop_insn;
193950397Sobrien       p = next_nonnote_insn (p))
194050397Sobrien    for (link = REG_NOTES (p); link; link = next)
194150397Sobrien      {
194250397Sobrien	next = XEXP (link, 1);
194350397Sobrien
194450397Sobrien	if (REG_NOTE_KIND (link) != REG_DEAD
194550397Sobrien	    || GET_CODE (XEXP (link, 0)) != REG)
194650397Sobrien	  continue;
194750397Sobrien
194850397Sobrien	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
194950397Sobrien	  {
195050397Sobrien	    remove_note (p, link);
195150397Sobrien	    return;
195250397Sobrien	  }
195350397Sobrien      }
195450397Sobrien}
195550397Sobrien
195618334Speter/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
195718334Speter
195818334Speter   This handles the case of udivmodXi4 instructions which optimize their
195918334Speter   output depending on whether any REG_UNUSED notes are present.
196018334Speter   We must make sure that INSN calculates as many results as REDUNDANT_INSN
196118334Speter   does.  */
196218334Speter
196318334Speterstatic void
196418334Speterupdate_reg_unused_notes (insn, redundant_insn)
196518334Speter     rtx insn, redundant_insn;
196618334Speter{
196750397Sobrien  rtx link, next;
196818334Speter
196918334Speter  for (link = REG_NOTES (insn); link; link = next)
197018334Speter    {
197118334Speter      next = XEXP (link, 1);
197218334Speter
197318334Speter      if (REG_NOTE_KIND (link) != REG_UNUSED
197418334Speter	  || GET_CODE (XEXP (link, 0)) != REG)
197518334Speter	continue;
197618334Speter
197718334Speter      if (! find_regno_note (redundant_insn, REG_UNUSED,
197818334Speter			     REGNO (XEXP (link, 0))))
197918334Speter	remove_note (insn, link);
198018334Speter    }
198118334Speter}
198218334Speter
198318334Speter/* Scan a function looking for insns that need a delay slot and find insns to
198418334Speter   put into the delay slot.
198518334Speter
198618334Speter   NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
198718334Speter   as calls).  We do these first since we don't want jump insns (that are
198818334Speter   easier to fill) to get the only insns that could be used for non-jump insns.
198918334Speter   When it is zero, only try to fill JUMP_INSNs.
199018334Speter
199118334Speter   When slots are filled in this manner, the insns (including the
199218334Speter   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
199318334Speter   it is possible to tell whether a delay slot has really been filled
199418334Speter   or not.  `final' knows how to deal with this, by communicating
199518334Speter   through FINAL_SEQUENCE.  */
199618334Speter
199718334Speterstatic void
199850397Sobrienfill_simple_delay_slots (non_jumps_p)
199918334Speter     int non_jumps_p;
200018334Speter{
200118334Speter  register rtx insn, pat, trial, next_trial;
200250397Sobrien  register int i;
200318334Speter  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
200418334Speter  struct resources needed, set;
200550397Sobrien  int slots_to_fill, slots_filled;
200618334Speter  rtx delay_list;
200718334Speter
200818334Speter  for (i = 0; i < num_unfilled_slots; i++)
200918334Speter    {
201018334Speter      int flags;
201118334Speter      /* Get the next insn to fill.  If it has already had any slots assigned,
201218334Speter	 we can't do anything with it.  Maybe we'll improve this later.  */
201318334Speter
201418334Speter      insn = unfilled_slots_base[i];
201518334Speter      if (insn == 0
201618334Speter	  || INSN_DELETED_P (insn)
201718334Speter	  || (GET_CODE (insn) == INSN
201818334Speter	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
201918334Speter	  || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
202018334Speter	  || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
202118334Speter	continue;
202218334Speter
202318334Speter      if (GET_CODE (insn) == JUMP_INSN)
202418334Speter	flags = get_jump_flags (insn, JUMP_LABEL (insn));
202518334Speter      else
202618334Speter	flags = get_jump_flags (insn, NULL_RTX);
202718334Speter      slots_to_fill = num_delay_slots (insn);
202850397Sobrien
202950397Sobrien      /* Some machine descriptions define instructions to have
203050397Sobrien	 delay slots only in certain circumstances which may depend on
203150397Sobrien	 nearby insns (which change due to reorg's actions).
203250397Sobrien
203350397Sobrien	 For example, the PA port normally has delay slots for unconditional
203450397Sobrien	 jumps.
203550397Sobrien
203650397Sobrien	 However, the PA port claims such jumps do not have a delay slot
203750397Sobrien	 if they are immediate successors of certain CALL_INSNs.  This
203850397Sobrien	 allows the port to favor filling the delay slot of the call with
203950397Sobrien	 the unconditional jump.  */
204018334Speter      if (slots_to_fill == 0)
204150397Sobrien	continue;
204218334Speter
204318334Speter      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
204418334Speter	 says how many.  After initialization, first try optimizing
204518334Speter
204618334Speter	 call _foo		call _foo
204718334Speter	 nop			add %o7,.-L1,%o7
204818334Speter	 b,a L1
204918334Speter	 nop
205018334Speter
205118334Speter	 If this case applies, the delay slot of the call is filled with
205218334Speter	 the unconditional jump.  This is done first to avoid having the
205318334Speter	 delay slot of the call filled in the backward scan.  Also, since
205418334Speter	 the unconditional jump is likely to also have a delay slot, that
205518334Speter	 insn must exist when it is subsequently scanned.
205618334Speter
205718334Speter	 This is tried on each insn with delay slots as some machines
205818334Speter	 have insns which perform calls, but are not represented as
205918334Speter	 CALL_INSNs.  */
206018334Speter
206118334Speter      slots_filled = 0;
206218334Speter      delay_list = 0;
206318334Speter
206418334Speter      if ((trial = next_active_insn (insn))
206518334Speter	  && GET_CODE (trial) == JUMP_INSN
206618334Speter	  && simplejump_p (trial)
206718334Speter	  && eligible_for_delay (insn, slots_filled, trial, flags)
206818334Speter	  && no_labels_between_p (insn, trial))
206918334Speter	{
207018334Speter	  rtx *tmp;
207118334Speter	  slots_filled++;
207218334Speter	  delay_list = add_to_delay_list (trial, delay_list);
207318334Speter
207418334Speter	  /* TRIAL may have had its delay slot filled, then unfilled.  When
207518334Speter	     the delay slot is unfilled, TRIAL is placed back on the unfilled
207618334Speter	     slots obstack.  Unfortunately, it is placed on the end of the
207718334Speter	     obstack, not in its original location.  Therefore, we must search
207818334Speter	     from entry i + 1 to the end of the unfilled slots obstack to
207918334Speter	     try and find TRIAL.  */
208018334Speter	  tmp = &unfilled_slots_base[i + 1];
208118334Speter	  while (*tmp != trial && tmp != unfilled_slots_next)
208218334Speter	    tmp++;
208318334Speter
208418334Speter	  /* Remove the unconditional jump from consideration for delay slot
208518334Speter	     filling and unthread it.   */
208618334Speter	  if (*tmp == trial)
208718334Speter	    *tmp = 0;
208818334Speter	  {
208918334Speter	    rtx next = NEXT_INSN (trial);
209018334Speter	    rtx prev = PREV_INSN (trial);
209118334Speter	    if (prev)
209218334Speter	      NEXT_INSN (prev) = next;
209318334Speter	    if (next)
209418334Speter	      PREV_INSN (next) = prev;
209518334Speter	  }
209618334Speter	}
209718334Speter
209818334Speter      /* Now, scan backwards from the insn to search for a potential
209918334Speter	 delay-slot candidate.  Stop searching when a label or jump is hit.
210018334Speter
210118334Speter	 For each candidate, if it is to go into the delay slot (moved
210218334Speter	 forward in execution sequence), it must not need or set any resources
210318334Speter	 that were set by later insns and must not set any resources that
210418334Speter	 are needed for those insns.
210518334Speter
210618334Speter	 The delay slot insn itself sets resources unless it is a call
210718334Speter	 (in which case the called routine, not the insn itself, is doing
210818334Speter	 the setting).  */
210918334Speter
211018334Speter      if (slots_filled < slots_to_fill)
211118334Speter	{
211218334Speter	  CLEAR_RESOURCE (&needed);
211318334Speter	  CLEAR_RESOURCE (&set);
211418334Speter	  mark_set_resources (insn, &set, 0, 0);
211518334Speter	  mark_referenced_resources (insn, &needed, 0);
211618334Speter
211718334Speter	  for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
211818334Speter	       trial = next_trial)
211918334Speter	    {
212018334Speter	      next_trial = prev_nonnote_insn (trial);
212118334Speter
212218334Speter	      /* This must be an INSN or CALL_INSN.  */
212318334Speter	      pat = PATTERN (trial);
212418334Speter
212518334Speter	      /* USE and CLOBBER at this level are just for flow; ignore them.  */
212618334Speter	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
212718334Speter		continue;
212818334Speter
212918334Speter	      /* Check for resource conflict first, to avoid unnecessary
213018334Speter		 splitting.  */
213118334Speter	      if (! insn_references_resource_p (trial, &set, 1)
213218334Speter		  && ! insn_sets_resource_p (trial, &set, 1)
213318334Speter		  && ! insn_sets_resource_p (trial, &needed, 1)
213418334Speter#ifdef HAVE_cc0
213518334Speter		  /* Can't separate set of cc0 from its use.  */
213618334Speter		  && ! (reg_mentioned_p (cc0_rtx, pat)
213752284Sobrien			&& ! sets_cc0_p (pat))
213818334Speter#endif
213918334Speter		  )
214018334Speter		{
214118334Speter		  trial = try_split (pat, trial, 1);
214218334Speter		  next_trial = prev_nonnote_insn (trial);
214318334Speter		  if (eligible_for_delay (insn, slots_filled, trial, flags))
214418334Speter		    {
214518334Speter		      /* In this case, we are searching backward, so if we
214618334Speter			 find insns to put on the delay list, we want
214718334Speter			 to put them at the head, rather than the
214818334Speter			 tail, of the list.  */
214918334Speter
215018334Speter		      update_reg_dead_notes (trial, insn);
215150397Sobrien		      delay_list = gen_rtx_INSN_LIST (VOIDmode,
215250397Sobrien						      trial, delay_list);
215318334Speter		      update_block (trial, trial);
215418334Speter		      delete_insn (trial);
215518334Speter		      if (slots_to_fill == ++slots_filled)
215618334Speter			break;
215718334Speter		      continue;
215818334Speter		    }
215918334Speter		}
216018334Speter
216118334Speter	      mark_set_resources (trial, &set, 0, 1);
216218334Speter	      mark_referenced_resources (trial, &needed, 1);
216318334Speter	    }
216418334Speter	}
216518334Speter
216618334Speter      /* If all needed slots haven't been filled, we come here.  */
216718334Speter
216818334Speter      /* Try to optimize case of jumping around a single insn.  */
216918334Speter#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
217018334Speter      if (slots_filled != slots_to_fill
217118334Speter	  && delay_list == 0
217218334Speter	  && GET_CODE (insn) == JUMP_INSN
217318334Speter	  && (condjump_p (insn) || condjump_in_parallel_p (insn)))
217418334Speter	{
217518334Speter	  delay_list = optimize_skip (insn);
217618334Speter	  if (delay_list)
217718334Speter	    slots_filled += 1;
217818334Speter	}
217918334Speter#endif
218018334Speter
218118334Speter      /* Try to get insns from beyond the insn needing the delay slot.
218218334Speter	 These insns can neither set nor reference resources set in insns being
218318334Speter	 skipped, cannot set resources in the insn being skipped, and, if this
218418334Speter	 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
218518334Speter	 call might not return).
218618334Speter
218718334Speter	 There used to be code which continued past the target label if
218818334Speter	 we saw all uses of the target label.  This code did not work,
218918334Speter	 because it failed to account for some instructions which were
219018334Speter	 both annulled and marked as from the target.  This can happen as a
219118334Speter	 result of optimize_skip.  Since this code was redundant with
219218334Speter	 fill_eager_delay_slots anyways, it was just deleted.  */
219318334Speter
219418334Speter      if (slots_filled != slots_to_fill
219518334Speter          && (GET_CODE (insn) != JUMP_INSN
219618334Speter	      || ((condjump_p (insn) || condjump_in_parallel_p (insn))
219718334Speter		   && ! simplejump_p (insn)
219818334Speter		   && JUMP_LABEL (insn) != 0)))
219918334Speter	{
220018334Speter	  rtx target = 0;
220118334Speter	  int maybe_never = 0;
220218334Speter	  struct resources needed_at_jump;
220318334Speter
220418334Speter	  CLEAR_RESOURCE (&needed);
220518334Speter	  CLEAR_RESOURCE (&set);
220618334Speter
220718334Speter	  if (GET_CODE (insn) == CALL_INSN)
220818334Speter	    {
220918334Speter	      mark_set_resources (insn, &set, 0, 1);
221018334Speter	      mark_referenced_resources (insn, &needed, 1);
221118334Speter	      maybe_never = 1;
221218334Speter	    }
221318334Speter	  else
221418334Speter	    {
221518334Speter	      mark_set_resources (insn, &set, 0, 1);
221618334Speter	      mark_referenced_resources (insn, &needed, 1);
221718334Speter	      if (GET_CODE (insn) == JUMP_INSN)
221818334Speter		target = JUMP_LABEL (insn);
221918334Speter	    }
222018334Speter
222118334Speter	  for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
222218334Speter	    {
222318334Speter	      rtx pat, trial_delay;
222418334Speter
222518334Speter	      next_trial = next_nonnote_insn (trial);
222618334Speter
222718334Speter	      if (GET_CODE (trial) == CODE_LABEL
222818334Speter		  || GET_CODE (trial) == BARRIER)
222918334Speter		break;
223018334Speter
223118334Speter	      /* We must have an INSN, JUMP_INSN, or CALL_INSN.  */
223218334Speter	      pat = PATTERN (trial);
223318334Speter
223418334Speter	      /* Stand-alone USE and CLOBBER are just for flow.  */
223518334Speter	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
223618334Speter		continue;
223718334Speter
223818334Speter	      /* If this already has filled delay slots, get the insn needing
223918334Speter		 the delay slots.  */
224018334Speter	      if (GET_CODE (pat) == SEQUENCE)
224118334Speter		trial_delay = XVECEXP (pat, 0, 0);
224218334Speter	      else
224318334Speter		trial_delay = trial;
224418334Speter
224518334Speter	      /* If this is a jump insn to our target, indicate that we have
224618334Speter		 seen another jump to it.  If we aren't handling a conditional
224718334Speter		 jump, stop our search. Otherwise, compute the needs at its
224818334Speter		 target and add them to NEEDED.  */
224918334Speter	      if (GET_CODE (trial_delay) == JUMP_INSN)
225018334Speter		{
225118334Speter		  if (target == 0)
225218334Speter		    break;
225318334Speter		  else if (JUMP_LABEL (trial_delay) != target)
225418334Speter		    {
225552284Sobrien		      rtx ninsn =
225652284Sobrien			next_active_insn (JUMP_LABEL (trial_delay));
225752284Sobrien
225852284Sobrien		      mark_target_live_regs (get_insns (), ninsn,
225952284Sobrien					     &needed_at_jump);
226018334Speter		      needed.memory |= needed_at_jump.memory;
226118334Speter		      needed.unch_memory |= needed_at_jump.unch_memory;
226218334Speter		      IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
226318334Speter		    }
226418334Speter		}
226518334Speter
226618334Speter	      /* See if we have a resource problem before we try to
226718334Speter		 split.   */
226818334Speter	      if (target == 0
226918334Speter		  && GET_CODE (pat) != SEQUENCE
227018334Speter		  && ! insn_references_resource_p (trial, &set, 1)
227118334Speter		  && ! insn_sets_resource_p (trial, &set, 1)
227218334Speter		  && ! insn_sets_resource_p (trial, &needed, 1)
227318334Speter#ifdef HAVE_cc0
227418334Speter		  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
227518334Speter#endif
227618334Speter		  && ! (maybe_never && may_trap_p (pat))
227718334Speter		  && (trial = try_split (pat, trial, 0))
227818334Speter		  && eligible_for_delay (insn, slots_filled, trial, flags))
227918334Speter		{
228018334Speter		  next_trial = next_nonnote_insn (trial);
228118334Speter		  delay_list = add_to_delay_list (trial, delay_list);
228218334Speter
228318334Speter#ifdef HAVE_cc0
228418334Speter		  if (reg_mentioned_p (cc0_rtx, pat))
228518334Speter		    link_cc0_insns (trial);
228618334Speter#endif
228718334Speter
228818334Speter		  delete_insn (trial);
228918334Speter		  if (slots_to_fill == ++slots_filled)
229018334Speter		    break;
229118334Speter		  continue;
229218334Speter		}
229318334Speter
229418334Speter	      mark_set_resources (trial, &set, 0, 1);
229518334Speter	      mark_referenced_resources (trial, &needed, 1);
229618334Speter
229718334Speter	      /* Ensure we don't put insns between the setting of cc and the
229818334Speter		 comparison by moving a setting of cc into an earlier delay
229918334Speter		 slot since these insns could clobber the condition code.  */
230018334Speter	      set.cc = 1;
230118334Speter
230218334Speter	      /* If this is a call or jump, we might not get here.  */
230318334Speter	      if (GET_CODE (trial_delay) == CALL_INSN
230418334Speter		  || GET_CODE (trial_delay) == JUMP_INSN)
230518334Speter		maybe_never = 1;
230618334Speter	    }
230718334Speter
230818334Speter	  /* If there are slots left to fill and our search was stopped by an
230918334Speter	     unconditional branch, try the insn at the branch target.  We can
231018334Speter	     redirect the branch if it works.
231118334Speter
231218334Speter	     Don't do this if the insn at the branch target is a branch.  */
231318334Speter	  if (slots_to_fill != slots_filled
231418334Speter	      && trial
231518334Speter	      && GET_CODE (trial) == JUMP_INSN
231618334Speter	      && simplejump_p (trial)
231718334Speter	      && (target == 0 || JUMP_LABEL (trial) == target)
231818334Speter	      && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
231918334Speter	      && ! (GET_CODE (next_trial) == INSN
232018334Speter		    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
232118334Speter	      && GET_CODE (next_trial) != JUMP_INSN
232218334Speter	      && ! insn_references_resource_p (next_trial, &set, 1)
232318334Speter	      && ! insn_sets_resource_p (next_trial, &set, 1)
232418334Speter	      && ! insn_sets_resource_p (next_trial, &needed, 1)
232518334Speter#ifdef HAVE_cc0
232618334Speter	      && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
232718334Speter#endif
232818334Speter	      && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
232918334Speter	      && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
233018334Speter	      && eligible_for_delay (insn, slots_filled, next_trial, flags))
233118334Speter	    {
233218334Speter	      rtx new_label = next_active_insn (next_trial);
233318334Speter
233418334Speter	      if (new_label != 0)
233518334Speter		new_label = get_label_before (new_label);
233618334Speter	      else
233718334Speter		new_label = find_end_label ();
233818334Speter
233918334Speter	      delay_list
234018334Speter		= add_to_delay_list (copy_rtx (next_trial), delay_list);
234118334Speter	      slots_filled++;
234218334Speter	      reorg_redirect_jump (trial, new_label);
234318334Speter
234418334Speter	      /* If we merged because we both jumped to the same place,
234518334Speter		 redirect the original insn also.  */
234618334Speter	      if (target)
234718334Speter		reorg_redirect_jump (insn, new_label);
234818334Speter	    }
234918334Speter	}
235018334Speter
235150397Sobrien      /* If this is an unconditional jump, then try to get insns from the
235250397Sobrien	 target of the jump.  */
235350397Sobrien      if (GET_CODE (insn) == JUMP_INSN
235450397Sobrien	  && simplejump_p (insn)
235550397Sobrien	  && slots_filled != slots_to_fill)
235650397Sobrien	delay_list
235750397Sobrien	  = fill_slots_from_thread (insn, const_true_rtx,
235850397Sobrien				    next_active_insn (JUMP_LABEL (insn)),
235950397Sobrien				    NULL, 1, 1,
236050397Sobrien				    own_thread_p (JUMP_LABEL (insn),
236150397Sobrien						  JUMP_LABEL (insn), 0),
236250397Sobrien				    slots_to_fill, &slots_filled,
236350397Sobrien				    delay_list);
236450397Sobrien
236518334Speter      if (delay_list)
236618334Speter	unfilled_slots_base[i]
236750397Sobrien	  = emit_delay_sequence (insn, delay_list, slots_filled);
236818334Speter
236918334Speter      if (slots_to_fill == slots_filled)
237018334Speter	unfilled_slots_base[i] = 0;
237118334Speter
237218334Speter      note_delay_statistics (slots_filled, 0);
237318334Speter    }
237418334Speter
237518334Speter#ifdef DELAY_SLOTS_FOR_EPILOGUE
237618334Speter  /* See if the epilogue needs any delay slots.  Try to fill them if so.
237718334Speter     The only thing we can do is scan backwards from the end of the
237818334Speter     function.  If we did this in a previous pass, it is incorrect to do it
237918334Speter     again.  */
238018334Speter  if (current_function_epilogue_delay_list)
238118334Speter    return;
238218334Speter
238318334Speter  slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
238418334Speter  if (slots_to_fill == 0)
238518334Speter    return;
238618334Speter
238718334Speter  slots_filled = 0;
238818334Speter  CLEAR_RESOURCE (&set);
238918334Speter
239018334Speter  /* The frame pointer and stack pointer are needed at the beginning of
239118334Speter     the epilogue, so instructions setting them can not be put in the
239218334Speter     epilogue delay slot.  However, everything else needed at function
239318334Speter     end is safe, so we don't want to use end_of_function_needs here.  */
239418334Speter  CLEAR_RESOURCE (&needed);
239518334Speter  if (frame_pointer_needed)
239618334Speter    {
239718334Speter      SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
239818334Speter#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
239918334Speter      SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
240018334Speter#endif
240118334Speter#ifdef EXIT_IGNORE_STACK
240252284Sobrien      if (! EXIT_IGNORE_STACK
240352284Sobrien	  || current_function_sp_is_unchanging)
240418334Speter#endif
240518334Speter	SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
240618334Speter    }
240718334Speter  else
240818334Speter    SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
240918334Speter
241050397Sobrien#ifdef EPILOGUE_USES
241150397Sobrien  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
241250397Sobrien    {
241350397Sobrien      if (EPILOGUE_USES (i))
241450397Sobrien	SET_HARD_REG_BIT (needed.regs, i);
241550397Sobrien    }
241650397Sobrien#endif
241750397Sobrien
241818334Speter  for (trial = get_last_insn (); ! stop_search_p (trial, 1);
241918334Speter       trial = PREV_INSN (trial))
242018334Speter    {
242118334Speter      if (GET_CODE (trial) == NOTE)
242218334Speter	continue;
242318334Speter      pat = PATTERN (trial);
242418334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
242518334Speter	continue;
242618334Speter
242718334Speter      if (! insn_references_resource_p (trial, &set, 1)
242818334Speter	  && ! insn_sets_resource_p (trial, &needed, 1)
242918334Speter	  && ! insn_sets_resource_p (trial, &set, 1)
243018334Speter#ifdef HAVE_cc0
243118334Speter	  /* Don't want to mess with cc0 here.  */
243218334Speter	  && ! reg_mentioned_p (cc0_rtx, pat)
243318334Speter#endif
243418334Speter	  )
243518334Speter	{
243618334Speter	  trial = try_split (pat, trial, 1);
243718334Speter	  if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
243818334Speter	    {
243918334Speter	      /* Here as well we are searching backward, so put the
244018334Speter		 insns we find on the head of the list.  */
244118334Speter
244218334Speter	      current_function_epilogue_delay_list
244350397Sobrien		= gen_rtx_INSN_LIST (VOIDmode, trial,
244450397Sobrien				     current_function_epilogue_delay_list);
244552284Sobrien	      mark_end_of_function_resources (trial, 1);
244618334Speter	      update_block (trial, trial);
244718334Speter	      delete_insn (trial);
244818334Speter
244918334Speter	      /* Clear deleted bit so final.c will output the insn.  */
245018334Speter	      INSN_DELETED_P (trial) = 0;
245118334Speter
245218334Speter	      if (slots_to_fill == ++slots_filled)
245318334Speter		break;
245418334Speter	      continue;
245518334Speter	    }
245618334Speter	}
245718334Speter
245818334Speter      mark_set_resources (trial, &set, 0, 1);
245918334Speter      mark_referenced_resources (trial, &needed, 1);
246018334Speter    }
246118334Speter
246218334Speter  note_delay_statistics (slots_filled, 0);
246318334Speter#endif
246418334Speter}
246518334Speter
246618334Speter/* Try to find insns to place in delay slots.
246718334Speter
246818334Speter   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
246918334Speter   or is an unconditional branch if CONDITION is const_true_rtx.
247018334Speter   *PSLOTS_FILLED is updated with the number of slots that we have filled.
247118334Speter
247218334Speter   THREAD is a flow-of-control, either the insns to be executed if the
247318334Speter   branch is true or if the branch is false, THREAD_IF_TRUE says which.
247418334Speter
247518334Speter   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
247618334Speter   to see if any potential delay slot insns set things needed there.
247718334Speter
247818334Speter   LIKELY is non-zero if it is extremely likely that the branch will be
247918334Speter   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
248018334Speter   end of a loop back up to the top.
248118334Speter
   OWN_THREAD is true if we are the only user of the thread, i.e., it is
   the fallthrough code of our jump or the target of the jump when we are
   the only jump going there.
248518334Speter
248618334Speter   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
248718334Speter   case, we can only take insns from the head of the thread for our delay
248818334Speter   slot.  We then adjust the jump to point after the insns we have taken.  */
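
/* As an illustrative sketch only (hypothetical registers and branch
   syntax; no particular target is implied), filling from the `true'
   thread of a conditional branch looks like:

	beq	r1,L1		; branch needing one delay slot
	...
   L1:	add	r2,r2,1		; head of the taken thread

   If the add conflicts with nothing ahead of it and is safe for the
   slot, it is moved (or copied) into the slot and the branch is
   redirected past it:

	beq	r1,L1'		; now points after the insn we took
	add	r2,r2,1		; delay slot
	...
   L1':	...  */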
248918334Speter
249018334Speterstatic rtx
249118334Speterfill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
249250397Sobrien			thread_if_true, own_thread,
249350397Sobrien			slots_to_fill, pslots_filled, delay_list)
249418334Speter     rtx insn;
249518334Speter     rtx condition;
249618334Speter     rtx thread, opposite_thread;
249718334Speter     int likely;
249818334Speter     int thread_if_true;
249950397Sobrien     int own_thread;
250018334Speter     int slots_to_fill, *pslots_filled;
250150397Sobrien     rtx delay_list;
250218334Speter{
250318334Speter  rtx new_thread;
250418334Speter  struct resources opposite_needed, set, needed;
250518334Speter  rtx trial;
250618334Speter  int lose = 0;
250718334Speter  int must_annul = 0;
250818334Speter  int flags;
250918334Speter
251018334Speter  /* Validate our arguments.  */
251118334Speter  if ((condition == const_true_rtx && ! thread_if_true)
251218334Speter      || (! own_thread && ! thread_if_true))
251318334Speter    abort ();
251418334Speter
251518334Speter  flags = get_jump_flags (insn, JUMP_LABEL (insn));
251618334Speter
  /* If our thread is the end of the subroutine, we can't get any delay
251818334Speter     insns from that.  */
251918334Speter  if (thread == 0)
    return delay_list;
252118334Speter
252218334Speter  /* If this is an unconditional branch, nothing is needed at the
252318334Speter     opposite thread.  Otherwise, compute what is needed there.  */
252418334Speter  if (condition == const_true_rtx)
252518334Speter    CLEAR_RESOURCE (&opposite_needed);
252618334Speter  else
252752284Sobrien    mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
252818334Speter
252918334Speter  /* If the insn at THREAD can be split, do it here to avoid having to
253018334Speter     update THREAD and NEW_THREAD if it is done in the loop below.  Also
253118334Speter     initialize NEW_THREAD.  */
253218334Speter
253318334Speter  new_thread = thread = try_split (PATTERN (thread), thread, 0);
253418334Speter
253518334Speter  /* Scan insns at THREAD.  We are looking for an insn that can be removed
253618334Speter     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or isn't
     needed at OPPOSITE_THREAD.  */
254018334Speter
254118334Speter  CLEAR_RESOURCE (&needed);
254218334Speter  CLEAR_RESOURCE (&set);
254318334Speter
254418334Speter  /* If we do not own this thread, we must stop as soon as we find
254518334Speter     something that we can't put in a delay slot, since all we can do
254618334Speter     is branch into THREAD at a later point.  Therefore, labels stop
254718334Speter     the search if this is not the `true' thread.  */
254818334Speter
254918334Speter  for (trial = thread;
255018334Speter       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
255118334Speter       trial = next_nonnote_insn (trial))
255218334Speter    {
255318334Speter      rtx pat, old_trial;
255418334Speter
255518334Speter      /* If we have passed a label, we no longer own this thread.  */
255618334Speter      if (GET_CODE (trial) == CODE_LABEL)
255718334Speter	{
255818334Speter	  own_thread = 0;
255918334Speter	  continue;
256018334Speter	}
256118334Speter
256218334Speter      pat = PATTERN (trial);
256318334Speter      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
256418334Speter	continue;
256518334Speter
256618334Speter      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
256718334Speter	 don't separate or copy insns that set and use CC0.  */
256818334Speter      if (! insn_references_resource_p (trial, &set, 1)
256918334Speter	  && ! insn_sets_resource_p (trial, &set, 1)
257018334Speter	  && ! insn_sets_resource_p (trial, &needed, 1)
257118334Speter#ifdef HAVE_cc0
257218334Speter	  && ! (reg_mentioned_p (cc0_rtx, pat)
257318334Speter		&& (! own_thread || ! sets_cc0_p (pat)))
257418334Speter#endif
257518334Speter	  )
257618334Speter	{
257718334Speter	  rtx prior_insn;
257818334Speter
257918334Speter	  /* If TRIAL is redundant with some insn before INSN, we don't
258018334Speter	     actually need to add it to the delay list; we can merely pretend
258118334Speter	     we did.  */
258250397Sobrien	  if ((prior_insn = redundant_insn (trial, insn, delay_list)))
258318334Speter	    {
258450397Sobrien	      fix_reg_dead_note (prior_insn, insn);
258518334Speter	      if (own_thread)
258618334Speter		{
258718334Speter		  update_block (trial, thread);
258818334Speter		  if (trial == thread)
258918334Speter		    {
259018334Speter		      thread = next_active_insn (thread);
259118334Speter		      if (new_thread == trial)
259218334Speter			new_thread = thread;
259318334Speter		    }
259418334Speter
259518334Speter		  delete_insn (trial);
259618334Speter		}
259718334Speter	      else
259818334Speter		{
259918334Speter		  update_reg_unused_notes (prior_insn, trial);
260018334Speter		  new_thread = next_active_insn (trial);
260118334Speter		}
260218334Speter
260318334Speter	      continue;
260418334Speter	    }
260518334Speter
260618334Speter	  /* There are two ways we can win:  If TRIAL doesn't set anything
260718334Speter	     needed at the opposite thread and can't trap, or if it can
260818334Speter	     go into an annulled delay slot.  */
260952284Sobrien	  if (!must_annul
261052284Sobrien	      && (condition == const_true_rtx
261152284Sobrien	          || (! insn_sets_resource_p (trial, &opposite_needed, 1)
261252284Sobrien		      && ! may_trap_p (pat))))
261318334Speter	    {
261418334Speter	      old_trial = trial;
261518334Speter	      trial = try_split (pat, trial, 0);
261618334Speter	      if (new_thread == old_trial)
261718334Speter		new_thread = trial;
261818334Speter	      if (thread == old_trial)
261918334Speter		thread = trial;
262018334Speter	      pat = PATTERN (trial);
262118334Speter	      if (eligible_for_delay (insn, *pslots_filled, trial, flags))
262218334Speter		goto winner;
262318334Speter	    }
262418334Speter	  else if (0
262518334Speter#ifdef ANNUL_IFTRUE_SLOTS
262618334Speter		   || ! thread_if_true
262718334Speter#endif
262818334Speter#ifdef ANNUL_IFFALSE_SLOTS
262918334Speter		   || thread_if_true
263018334Speter#endif
263118334Speter		   )
263218334Speter	    {
263318334Speter	      old_trial = trial;
263418334Speter	      trial = try_split (pat, trial, 0);
263518334Speter	      if (new_thread == old_trial)
263618334Speter		new_thread = trial;
263718334Speter	      if (thread == old_trial)
263818334Speter		thread = trial;
263918334Speter	      pat = PATTERN (trial);
	      if ((must_annul || delay_list == NULL)
		  && (thread_if_true
		      ? (check_annul_list_true_false (0, delay_list)
			 && eligible_for_annul_false (insn, *pslots_filled,
						      trial, flags))
		      : (check_annul_list_true_false (1, delay_list)
			 && eligible_for_annul_true (insn, *pslots_filled,
						     trial, flags))))
264518334Speter		{
264618334Speter		  rtx temp;
264718334Speter
264818334Speter		  must_annul = 1;
264918334Speter		winner:
265018334Speter
265118334Speter#ifdef HAVE_cc0
265218334Speter		  if (reg_mentioned_p (cc0_rtx, pat))
265318334Speter		    link_cc0_insns (trial);
265418334Speter#endif
265518334Speter
265618334Speter		  /* If we own this thread, delete the insn.  If this is the
265718334Speter		     destination of a branch, show that a basic block status
265818334Speter		     may have been updated.  In any case, mark the new
265918334Speter		     starting point of this thread.  */
266018334Speter		  if (own_thread)
266118334Speter		    {
266218334Speter		      update_block (trial, thread);
266350397Sobrien		      if (trial == thread)
266450397Sobrien			{
266550397Sobrien			  thread = next_active_insn (thread);
266650397Sobrien			  if (new_thread == trial)
266750397Sobrien			    new_thread = thread;
266850397Sobrien			}
266918334Speter		      delete_insn (trial);
267018334Speter		    }
267118334Speter		  else
267218334Speter		    new_thread = next_active_insn (trial);
267318334Speter
267418334Speter		  temp = own_thread ? trial : copy_rtx (trial);
267518334Speter		  if (thread_if_true)
267618334Speter		    INSN_FROM_TARGET_P (temp) = 1;
267718334Speter
267818334Speter		  delay_list = add_to_delay_list (temp, delay_list);
267918334Speter
268018334Speter		  if (slots_to_fill == ++(*pslots_filled))
268118334Speter		    {
268218334Speter		      /* Even though we have filled all the slots, we
268318334Speter			 may be branching to a location that has a
268418334Speter			 redundant insn.  Skip any if so.  */
268518334Speter		      while (new_thread && ! own_thread
268618334Speter			     && ! insn_sets_resource_p (new_thread, &set, 1)
268718334Speter			     && ! insn_sets_resource_p (new_thread, &needed, 1)
268818334Speter			     && ! insn_references_resource_p (new_thread,
268918334Speter							      &set, 1)
269050397Sobrien			     && (prior_insn
269150397Sobrien				 = redundant_insn (new_thread, insn,
269250397Sobrien						   delay_list)))
269350397Sobrien			{
269450397Sobrien			  /* We know we do not own the thread, so no need
269550397Sobrien			     to call update_block and delete_insn.  */
269650397Sobrien			  fix_reg_dead_note (prior_insn, insn);
269750397Sobrien			  update_reg_unused_notes (prior_insn, new_thread);
269850397Sobrien			  new_thread = next_active_insn (new_thread);
269950397Sobrien			}
270018334Speter		      break;
270118334Speter		    }
270218334Speter
270318334Speter		  continue;
270418334Speter		}
270518334Speter	    }
270618334Speter	}
270718334Speter
270818334Speter      /* This insn can't go into a delay slot.  */
270918334Speter      lose = 1;
271018334Speter      mark_set_resources (trial, &set, 0, 1);
271118334Speter      mark_referenced_resources (trial, &needed, 1);
271218334Speter
271318334Speter      /* Ensure we don't put insns between the setting of cc and the comparison
	 by moving a setting of cc into an earlier delay slot, since these insns
271518334Speter	 could clobber the condition code.  */
271618334Speter      set.cc = 1;
271718334Speter
271818334Speter      /* If this insn is a register-register copy and the next insn has
271918334Speter	 a use of our destination, change it to use our source.  That way,
272018334Speter	 it will become a candidate for our delay slot the next time
272118334Speter	 through this loop.  This case occurs commonly in loops that
272218334Speter	 scan a list.
272318334Speter
272418334Speter	 We could check for more complex cases than those tested below,
272518334Speter	 but it doesn't seem worth it.  It might also be a good idea to try
272618334Speter	 to swap the two insns.  That might do better.
272718334Speter
272818334Speter	 We can't do this if the next insn modifies our destination, because
272918334Speter	 that would make the replacement into the insn invalid.  We also can't
273018334Speter	 do this if it modifies our source, because it might be an earlyclobber
273118334Speter	 operand.  This latter test also prevents updating the contents of
273218334Speter	 a PRE_INC.  */
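
      /* A minimal sketch of that replacement (hypothetical registers,
	 written as source-level pseudo-RTL):

		r2 = r1;	-- TRIAL: the reg-reg copy
		r3 = r2 + 4;	-- NEXT: uses our destination

	 After validate_replace_rtx succeeds, NEXT reads the source:

		r3 = r1 + 4;	-- no longer depends on the copy

	 so NEXT may become a delay slot candidate the next time through
	 this loop.  */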
273318334Speter
273418334Speter      if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
273518334Speter	  && GET_CODE (SET_SRC (pat)) == REG
273618334Speter	  && GET_CODE (SET_DEST (pat)) == REG)
273718334Speter	{
273818334Speter	  rtx next = next_nonnote_insn (trial);
273918334Speter
274018334Speter	  if (next && GET_CODE (next) == INSN
274118334Speter	      && GET_CODE (PATTERN (next)) != USE
274218334Speter	      && ! reg_set_p (SET_DEST (pat), next)
274318334Speter	      && ! reg_set_p (SET_SRC (pat), next)
274418334Speter	      && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
274518334Speter	    validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
274618334Speter	}
274718334Speter    }
274818334Speter
274918334Speter  /* If we stopped on a branch insn that has delay slots, see if we can
275018334Speter     steal some of the insns in those slots.  */
275118334Speter  if (trial && GET_CODE (trial) == INSN
275218334Speter      && GET_CODE (PATTERN (trial)) == SEQUENCE
275318334Speter      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
275418334Speter    {
275518334Speter      /* If this is the `true' thread, we will want to follow the jump,
275618334Speter	 so we can only do this if we have taken everything up to here.  */
275752284Sobrien      if (thread_if_true && trial == new_thread)
275818334Speter	delay_list
275918334Speter	  = steal_delay_list_from_target (insn, condition, PATTERN (trial),
276018334Speter					  delay_list, &set, &needed,
276118334Speter					  &opposite_needed, slots_to_fill,
276218334Speter					  pslots_filled, &must_annul,
276318334Speter					  &new_thread);
276418334Speter      else if (! thread_if_true)
276518334Speter	delay_list
276618334Speter	  = steal_delay_list_from_fallthrough (insn, condition,
276718334Speter					       PATTERN (trial),
276818334Speter					       delay_list, &set, &needed,
276918334Speter					       &opposite_needed, slots_to_fill,
277018334Speter					       pslots_filled, &must_annul);
277118334Speter    }
277218334Speter
277318334Speter  /* If we haven't found anything for this delay slot and it is very
277418334Speter     likely that the branch will be taken, see if the insn at our target
277518334Speter     increments or decrements a register with an increment that does not
277618334Speter     depend on the destination register.  If so, try to place the opposite
277718334Speter     arithmetic insn after the jump insn and put the arithmetic insn in the
277818334Speter     delay slot.  If we can't do this, return.  */
277950397Sobrien  if (delay_list == 0 && likely && new_thread
278050397Sobrien      && GET_CODE (new_thread) == INSN
278150397Sobrien      && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
278250397Sobrien      && asm_noperands (PATTERN (new_thread)) < 0)
278318334Speter    {
278418334Speter      rtx pat = PATTERN (new_thread);
278518334Speter      rtx dest;
278618334Speter      rtx src;
278718334Speter
278818334Speter      trial = new_thread;
278918334Speter      pat = PATTERN (trial);
279018334Speter
279118334Speter      if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
279218334Speter	  || ! eligible_for_delay (insn, 0, trial, flags))
279318334Speter	return 0;
279418334Speter
279518334Speter      dest = SET_DEST (pat), src = SET_SRC (pat);
279618334Speter      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
279718334Speter	  && rtx_equal_p (XEXP (src, 0), dest)
279818334Speter	  && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
279918334Speter	{
280018334Speter	  rtx other = XEXP (src, 1);
280118334Speter	  rtx new_arith;
280218334Speter	  rtx ninsn;
280318334Speter
280418334Speter	  /* If this is a constant adjustment, use the same code with
280518334Speter	     the negated constant.  Otherwise, reverse the sense of the
280618334Speter	     arithmetic.  */
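
	  /* Sketch (illustrative only): if the target begins with
	     `r1 = r1 + 8', we put that increment in the delay slot,
	     emit the compensating `r1 = r1 - 8' on the fallthrough
	     path, and redirect the branch past the increment:

		beq	L1'		; delay slot: r1 = r1 + 8
		r1 = r1 - 8		; undoes the slot when not taken
		...
	     L1':	...		; just after the original add  */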
280718334Speter	  if (GET_CODE (other) == CONST_INT)
280850397Sobrien	    new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
280950397Sobrien					negate_rtx (GET_MODE (src), other));
281018334Speter	  else
281150397Sobrien	    new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
281250397Sobrien					GET_MODE (src), dest, other);
281318334Speter
281450397Sobrien	  ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
281518334Speter				   insn);
281618334Speter
281718334Speter	  if (recog_memoized (ninsn) < 0
281852284Sobrien	      || (extract_insn (ninsn), ! constrain_operands (1)))
281918334Speter	    {
282018334Speter	      delete_insn (ninsn);
282118334Speter	      return 0;
282218334Speter	    }
282318334Speter
282418334Speter	  if (own_thread)
282518334Speter	    {
282618334Speter	      update_block (trial, thread);
282750397Sobrien	      if (trial == thread)
282850397Sobrien		{
282950397Sobrien		  thread = next_active_insn (thread);
283050397Sobrien		  if (new_thread == trial)
283150397Sobrien		    new_thread = thread;
283250397Sobrien		}
283318334Speter	      delete_insn (trial);
283418334Speter	    }
283518334Speter	  else
283618334Speter	    new_thread = next_active_insn (trial);
283718334Speter
283818334Speter	  ninsn = own_thread ? trial : copy_rtx (trial);
283918334Speter	  if (thread_if_true)
284018334Speter	    INSN_FROM_TARGET_P (ninsn) = 1;
284118334Speter
284218334Speter	  delay_list = add_to_delay_list (ninsn, NULL_RTX);
284318334Speter	  (*pslots_filled)++;
284418334Speter	}
284518334Speter    }
284618334Speter
284718334Speter  if (delay_list && must_annul)
284818334Speter    INSN_ANNULLED_BRANCH_P (insn) = 1;
284918334Speter
285018334Speter  /* If we are to branch into the middle of this thread, find an appropriate
285118334Speter     label or make a new one if none, and redirect INSN to it.  If we hit the
285218334Speter     end of the function, use the end-of-function label.  */
285318334Speter  if (new_thread != thread)
285418334Speter    {
285518334Speter      rtx label;
285618334Speter
285718334Speter      if (! thread_if_true)
285818334Speter	abort ();
285918334Speter
286018334Speter      if (new_thread && GET_CODE (new_thread) == JUMP_INSN
286118334Speter	  && (simplejump_p (new_thread)
286218334Speter	      || GET_CODE (PATTERN (new_thread)) == RETURN)
286318334Speter	  && redirect_with_delay_list_safe_p (insn,
286418334Speter					      JUMP_LABEL (new_thread),
286518334Speter					      delay_list))
286618334Speter	new_thread = follow_jumps (JUMP_LABEL (new_thread));
286718334Speter
286818334Speter      if (new_thread == 0)
286918334Speter	label = find_end_label ();
287018334Speter      else if (GET_CODE (new_thread) == CODE_LABEL)
287118334Speter	label = new_thread;
287218334Speter      else
287318334Speter	label = get_label_before (new_thread);
287418334Speter
287518334Speter      reorg_redirect_jump (insn, label);
287618334Speter    }
287718334Speter
287818334Speter  return delay_list;
287918334Speter}
288018334Speter
288118334Speter/* Make another attempt to find insns to place in delay slots.
288218334Speter
288318334Speter   We previously looked for insns located in front of the delay insn
288418334Speter   and, for non-jump delay insns, located behind the delay insn.
288518334Speter
   Here we only try to schedule jump insns, moving insns from either the
   target or the following insns into the delay slot.  If annulling is
   supported, we are likely to succeed.  Otherwise, we can do this only
   if it is safe.  */
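
/* In outline (a simplified sketch of the strategy implemented below,
   not additional behavior):

	if the branch is predicted taken:
	    fill from the target thread, then from the fallthrough;
	else:
	    fill from the fallthrough thread, then from the target.  */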
289018334Speter
289118334Speterstatic void
289250397Sobrienfill_eager_delay_slots ()
289318334Speter{
289418334Speter  register rtx insn;
289518334Speter  register int i;
289618334Speter  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
289718334Speter
289818334Speter  for (i = 0; i < num_unfilled_slots; i++)
289918334Speter    {
290018334Speter      rtx condition;
290118334Speter      rtx target_label, insn_at_target, fallthrough_insn;
290218334Speter      rtx delay_list = 0;
290318334Speter      int own_target;
290418334Speter      int own_fallthrough;
290518334Speter      int prediction, slots_to_fill, slots_filled;
290618334Speter
290718334Speter      insn = unfilled_slots_base[i];
290818334Speter      if (insn == 0
290918334Speter	  || INSN_DELETED_P (insn)
291018334Speter	  || GET_CODE (insn) != JUMP_INSN
291118334Speter	  || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
291218334Speter	continue;
291318334Speter
291418334Speter      slots_to_fill = num_delay_slots (insn);
      /* Some machine descriptions define instructions to have
	 delay slots only in certain circumstances, which may depend on
291750397Sobrien	 nearby insns (which change due to reorg's actions).
291850397Sobrien
291950397Sobrien 	 For example, the PA port normally has delay slots for unconditional
292050397Sobrien	 jumps.
292150397Sobrien
292250397Sobrien	 However, the PA port claims such jumps do not have a delay slot
292350397Sobrien	 if they are immediate successors of certain CALL_INSNs.  This
292450397Sobrien	 allows the port to favor filling the delay slot of the call with
292550397Sobrien	 the unconditional jump.  */
292618334Speter      if (slots_to_fill == 0)
292750397Sobrien        continue;
292818334Speter
292918334Speter      slots_filled = 0;
293018334Speter      target_label = JUMP_LABEL (insn);
293118334Speter      condition = get_branch_condition (insn, target_label);
293218334Speter
293318334Speter      if (condition == 0)
293418334Speter	continue;
293518334Speter
293618334Speter      /* Get the next active fallthrough and target insns and see if we own
293718334Speter	 them.  Then see whether the branch is likely true.  We don't need
293818334Speter	 to do a lot of this for unconditional branches.  */
293918334Speter
294018334Speter      insn_at_target = next_active_insn (target_label);
294118334Speter      own_target = own_thread_p (target_label, target_label, 0);
294218334Speter
294318334Speter      if (condition == const_true_rtx)
294418334Speter	{
294518334Speter	  own_fallthrough = 0;
294618334Speter	  fallthrough_insn = 0;
294718334Speter	  prediction = 2;
294818334Speter	}
294918334Speter      else
295018334Speter	{
295118334Speter	  fallthrough_insn = next_active_insn (insn);
295218334Speter	  own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
295318334Speter	  prediction = mostly_true_jump (insn, condition);
295418334Speter	}
295518334Speter
295618334Speter      /* If this insn is expected to branch, first try to get insns from our
	 target, then our fallthrough insns.  If it is not expected to branch,
295818334Speter	 try the other order.  */
295918334Speter
296018334Speter      if (prediction > 0)
296118334Speter	{
296218334Speter	  delay_list
296318334Speter	    = fill_slots_from_thread (insn, condition, insn_at_target,
296418334Speter				      fallthrough_insn, prediction == 2, 1,
296550397Sobrien				      own_target,
296650397Sobrien				      slots_to_fill, &slots_filled, delay_list);
296718334Speter
296818334Speter	  if (delay_list == 0 && own_fallthrough)
296918334Speter	    {
297018334Speter	      /* Even though we didn't find anything for delay slots,
297118334Speter		 we might have found a redundant insn which we deleted
297218334Speter		 from the thread that was filled.  So we have to recompute
297318334Speter		 the next insn at the target.  */
297418334Speter	      target_label = JUMP_LABEL (insn);
297518334Speter	      insn_at_target = next_active_insn (target_label);
297618334Speter
297718334Speter	      delay_list
297818334Speter		= fill_slots_from_thread (insn, condition, fallthrough_insn,
297918334Speter					  insn_at_target, 0, 0,
298050397Sobrien					  own_fallthrough,
298150397Sobrien					  slots_to_fill, &slots_filled,
298250397Sobrien					  delay_list);
298318334Speter	    }
298418334Speter	}
298518334Speter      else
298618334Speter	{
298718334Speter	  if (own_fallthrough)
298818334Speter	    delay_list
298918334Speter	      = fill_slots_from_thread (insn, condition, fallthrough_insn,
299018334Speter					insn_at_target, 0, 0,
299150397Sobrien					own_fallthrough,
299250397Sobrien					slots_to_fill, &slots_filled,
299350397Sobrien					delay_list);
299418334Speter
299518334Speter	  if (delay_list == 0)
299618334Speter	    delay_list
299718334Speter	      = fill_slots_from_thread (insn, condition, insn_at_target,
299818334Speter					next_active_insn (insn), 0, 1,
299950397Sobrien					own_target,
300050397Sobrien					slots_to_fill, &slots_filled,
300150397Sobrien					delay_list);
300218334Speter	}
300318334Speter
300418334Speter      if (delay_list)
300518334Speter	unfilled_slots_base[i]
300650397Sobrien	  = emit_delay_sequence (insn, delay_list, slots_filled);
300718334Speter
300818334Speter      if (slots_to_fill == slots_filled)
300918334Speter	unfilled_slots_base[i] = 0;
301018334Speter
301118334Speter      note_delay_statistics (slots_filled, 1);
301218334Speter    }
301318334Speter}
301418334Speter
301518334Speter/* Once we have tried two ways to fill a delay slot, make a pass over the
301618334Speter   code to try to improve the results and to do such things as more jump
301718334Speter   threading.  */
301818334Speter
301918334Speterstatic void
302018334Speterrelax_delay_slots (first)
302118334Speter     rtx first;
302218334Speter{
302318334Speter  register rtx insn, next, pat;
302418334Speter  register rtx trial, delay_insn, target_label;
302518334Speter
302618334Speter  /* Look at every JUMP_INSN and see if we can improve it.  */
302718334Speter  for (insn = first; insn; insn = next)
302818334Speter    {
302918334Speter      rtx other;
303018334Speter
303118334Speter      next = next_active_insn (insn);
303218334Speter
303318334Speter      /* If this is a jump insn, see if it now jumps to a jump, jumps to
303418334Speter	 the next insn, or jumps to a label that is not the last of a
303518334Speter	 group of consecutive labels.  */
303618334Speter      if (GET_CODE (insn) == JUMP_INSN
303718334Speter	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
303818334Speter	  && (target_label = JUMP_LABEL (insn)) != 0)
303918334Speter	{
304018334Speter	  target_label = follow_jumps (target_label);
304118334Speter	  target_label = prev_label (next_active_insn (target_label));
304218334Speter
304318334Speter	  if (target_label == 0)
304418334Speter	    target_label = find_end_label ();
304518334Speter
304618334Speter	  if (next_active_insn (target_label) == next
304718334Speter	      && ! condjump_in_parallel_p (insn))
304818334Speter	    {
304918334Speter	      delete_jump (insn);
305018334Speter	      continue;
305118334Speter	    }
305218334Speter
305318334Speter	  if (target_label != JUMP_LABEL (insn))
305418334Speter	    reorg_redirect_jump (insn, target_label);
305518334Speter
	  /* See if this jump branches around an unconditional jump.
305718334Speter	     If so, invert this jump and point it to the target of the
305818334Speter	     second jump.  */
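
	  /* Sketch (illustrative):

		beq	L1		; this jump...
		jmp	L2		; ...around an unconditional jump
	     L1:	...

	     becomes

		bne	L2
	     L1:	...  */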
305918334Speter	  if (next && GET_CODE (next) == JUMP_INSN
306018334Speter	      && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
306118334Speter	      && next_active_insn (target_label) == next_active_insn (next)
306218334Speter	      && no_labels_between_p (insn, next))
306318334Speter	    {
306418334Speter	      rtx label = JUMP_LABEL (next);
306518334Speter
306618334Speter	      /* Be careful how we do this to avoid deleting code or
306718334Speter		 labels that are momentarily dead.  See similar optimization
306818334Speter		 in jump.c.
306918334Speter
307018334Speter		 We also need to ensure we properly handle the case when
307118334Speter		 invert_jump fails.  */
307218334Speter
307318334Speter	      ++LABEL_NUSES (target_label);
307418334Speter	      if (label)
307518334Speter		++LABEL_NUSES (label);
307618334Speter
307718334Speter	      if (invert_jump (insn, label))
307818334Speter		{
307918334Speter		  delete_insn (next);
308018334Speter		  next = insn;
308118334Speter		}
308218334Speter
308318334Speter	      if (label)
308418334Speter		--LABEL_NUSES (label);
308518334Speter
308618334Speter	      if (--LABEL_NUSES (target_label) == 0)
308718334Speter		delete_insn (target_label);
308818334Speter
308918334Speter	      continue;
309018334Speter	    }
309118334Speter	}
309218334Speter
309318334Speter      /* If this is an unconditional jump and the previous insn is a
309418334Speter	 conditional jump, try reversing the condition of the previous
309518334Speter	 insn and swapping our targets.  The next pass might be able to
309618334Speter	 fill the slots.
309718334Speter
309818334Speter	 Don't do this if we expect the conditional branch to be true, because
309918334Speter	 we would then be making the more common case longer.  */
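
      /* Sketch (illustrative): when the conditional is mostly not taken,

		beq	L1		; conditional, expected not taken
		jmp	L2		; unconditional, slots unfilled

	 becomes

		bne	L2
		jmp	L1

	 so a later pass may find slot insns along the L1 thread.  */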
310018334Speter
310118334Speter      if (GET_CODE (insn) == JUMP_INSN
310218334Speter	  && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
310318334Speter	  && (other = prev_active_insn (insn)) != 0
310418334Speter	  && (condjump_p (other) || condjump_in_parallel_p (other))
310518334Speter	  && no_labels_between_p (other, insn)
310652284Sobrien	  && 0 > mostly_true_jump (other,
310718334Speter				   get_branch_condition (other,
310818334Speter							 JUMP_LABEL (other))))
310918334Speter	{
311018334Speter	  rtx other_target = JUMP_LABEL (other);
311118334Speter	  target_label = JUMP_LABEL (insn);
311218334Speter
311318334Speter	  /* Increment the count of OTHER_TARGET, so it doesn't get deleted
311418334Speter	     as we move the label.  */
311518334Speter	  if (other_target)
311618334Speter	    ++LABEL_NUSES (other_target);
311718334Speter
311818334Speter	  if (invert_jump (other, target_label))
311918334Speter	    reorg_redirect_jump (insn, other_target);
312018334Speter
312118334Speter	  if (other_target)
312218334Speter	    --LABEL_NUSES (other_target);
312318334Speter	}
312418334Speter
312518334Speter      /* Now look only at cases where we have filled a delay slot.  */
312618334Speter      if (GET_CODE (insn) != INSN
312718334Speter	  || GET_CODE (PATTERN (insn)) != SEQUENCE)
312818334Speter	continue;
312918334Speter
313018334Speter      pat = PATTERN (insn);
313118334Speter      delay_insn = XVECEXP (pat, 0, 0);
313218334Speter
313318334Speter      /* See if the first insn in the delay slot is redundant with some
313418334Speter	 previous insn.  Remove it from the delay slot if so; then set up
313518334Speter	 to reprocess this insn.  */
313618334Speter      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
313718334Speter	{
313818334Speter	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
313918334Speter	  next = prev_active_insn (next);
314018334Speter	  continue;
314118334Speter	}
314218334Speter
314352284Sobrien      /* See if we have a RETURN insn with a filled delay slot followed
	 by a RETURN insn with an unfilled delay slot.  If so, we can delete
	 the first RETURN (but not its delay insn).  This gives the same
314652284Sobrien	 effect in fewer instructions.
314752284Sobrien
	 Only do so when optimizing for size, since this results in slower but
	 smaller code.  */
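
      /* Sketch (illustrative):

		ret			; SEQUENCE: slot holds mov r2,r1
		 mov	r2,r1
		ret			; slot unfilled

	 becomes

		mov	r2,r1		; delay insn re-emitted by itself
		ret			; remaining RETURN, slot still unfilled  */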
315052284Sobrien      if (optimize_size
315152284Sobrien	  && GET_CODE (PATTERN (delay_insn)) == RETURN
315252284Sobrien	  && next
315352284Sobrien	  && GET_CODE (next) == JUMP_INSN
315452284Sobrien	  && GET_CODE (PATTERN (next)) == RETURN)
315552284Sobrien	{
315652284Sobrien	  int i;
315752284Sobrien
315852284Sobrien	  /* Delete the RETURN and just execute the delay list insns.
315952284Sobrien
316052284Sobrien	     We do this by deleting the INSN containing the SEQUENCE, then
316152284Sobrien	     re-emitting the insns separately, and then deleting the RETURN.
316252284Sobrien	     This allows the count of the jump target to be properly
316352284Sobrien	     decremented.  */
316452284Sobrien
316552284Sobrien	  /* Clear the from target bit, since these insns are no longer
316652284Sobrien	     in delay slots.  */
316752284Sobrien	  for (i = 0; i < XVECLEN (pat, 0); i++)
316852284Sobrien	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
316952284Sobrien
317052284Sobrien	  trial = PREV_INSN (insn);
317152284Sobrien	  delete_insn (insn);
317252284Sobrien	  emit_insn_after (pat, trial);
317352284Sobrien	  delete_scheduled_jump (delay_insn);
317452284Sobrien	  continue;
317552284Sobrien	}
317652284Sobrien
317718334Speter      /* Now look only at the cases where we have a filled JUMP_INSN.  */
317818334Speter      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
317918334Speter	  || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
318018334Speter		|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
318118334Speter	continue;
318218334Speter
318318334Speter      target_label = JUMP_LABEL (delay_insn);
318418334Speter
318518334Speter      if (target_label)
318618334Speter	{
318718334Speter	  /* If this jump goes to another unconditional jump, thread it, but
318818334Speter	     don't convert a jump into a RETURN here.  */
318918334Speter	  trial = follow_jumps (target_label);
319018334Speter	  /* We use next_real_insn instead of next_active_insn, so that
319118334Speter	     the special USE insns emitted by reorg won't be ignored.
319218334Speter	     If they are ignored, then they will get deleted if target_label
319318334Speter	     is now unreachable, and that would cause mark_target_live_regs
319418334Speter	     to fail.  */
319518334Speter	  trial = prev_label (next_real_insn (trial));
319618334Speter	  if (trial == 0 && target_label != 0)
319718334Speter	    trial = find_end_label ();
319818334Speter
319918334Speter	  if (trial != target_label
320018334Speter	      && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
320118334Speter	    {
320218334Speter	      reorg_redirect_jump (delay_insn, trial);
320318334Speter	      target_label = trial;
320418334Speter	    }
320518334Speter
320618334Speter	  /* If the first insn at TARGET_LABEL is redundant with a previous
	     insn, redirect the jump to the following insn and process it again.  */
320818334Speter	  trial = next_active_insn (target_label);
320918334Speter	  if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
321018334Speter	      && redundant_insn (trial, insn, 0))
321118334Speter	    {
321250397Sobrien	      rtx tmp;
321350397Sobrien
321450397Sobrien	      /* Figure out where to emit the special USE insn so we don't
321550397Sobrien		 later incorrectly compute register live/death info.  */
321650397Sobrien	      tmp = next_active_insn (trial);
321750397Sobrien	      if (tmp == 0)
321850397Sobrien		tmp = find_end_label ();
321950397Sobrien
322050397Sobrien	      /* Insert the special USE insn and update dataflow info.  */
322150397Sobrien              update_block (trial, tmp);
322250397Sobrien
322350397Sobrien	      /* Now emit a label before the special USE insn, and
322450397Sobrien		 redirect our jump to the new label.  */
322550397Sobrien	      target_label = get_label_before (PREV_INSN (tmp));
322618334Speter	      reorg_redirect_jump (delay_insn, target_label);
322718334Speter	      next = insn;
322818334Speter	      continue;
322918334Speter	    }
323018334Speter
323118334Speter	  /* Similarly, if it is an unconditional jump with one insn in its
323218334Speter	     delay list and that insn is redundant, thread the jump.  */
323318334Speter	  if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
323418334Speter	      && XVECLEN (PATTERN (trial), 0) == 2
323518334Speter	      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
323618334Speter	      && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
323718334Speter		  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
323818334Speter	      && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
323918334Speter	    {
324018334Speter	      target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
324118334Speter	      if (target_label == 0)
324218334Speter		target_label = find_end_label ();
324318334Speter
324418334Speter	      if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
324518334Speter						    insn))
324618334Speter		{
324718334Speter		  reorg_redirect_jump (delay_insn, target_label);
324818334Speter		  next = insn;
324918334Speter		  continue;
325018334Speter		}
325118334Speter	    }
325218334Speter	}
325318334Speter
325418334Speter      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
325518334Speter	  && prev_active_insn (target_label) == insn
325618334Speter	  && ! condjump_in_parallel_p (delay_insn)
325718334Speter#ifdef HAVE_cc0
325818334Speter	  /* If the last insn in the delay slot sets CC0 for some insn,
325918334Speter	     various code assumes that it is in a delay slot.  We could
326018334Speter	     put it back where it belonged and delete the register notes,
326118334Speter	     but it doesn't seem worthwhile in this uncommon case.  */
326218334Speter	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
326318334Speter			      REG_CC_USER, NULL_RTX)
326418334Speter#endif
326518334Speter	  )
326618334Speter	{
326718334Speter	  int i;
326818334Speter
326918334Speter	  /* All this insn does is execute its delay list and jump to the
327018334Speter	     following insn.  So delete the jump and just execute the delay
327118334Speter	     list insns.
327218334Speter
327318334Speter	     We do this by deleting the INSN containing the SEQUENCE, then
327418334Speter	     re-emitting the insns separately, and then deleting the jump.
327518334Speter	     This allows the count of the jump target to be properly
327618334Speter	     decremented.  */
327718334Speter
327818334Speter	  /* Clear the from target bit, since these insns are no longer
327918334Speter	     in delay slots.  */
328018334Speter	  for (i = 0; i < XVECLEN (pat, 0); i++)
328118334Speter	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
328218334Speter
328318334Speter	  trial = PREV_INSN (insn);
328418334Speter	  delete_insn (insn);
328518334Speter	  emit_insn_after (pat, trial);
328618334Speter	  delete_scheduled_jump (delay_insn);
328718334Speter	  continue;
328818334Speter	}
328918334Speter
329018334Speter      /* See if this is an unconditional jump around a single insn which is
329118334Speter	 identical to the one in its delay slot.  In this case, we can just
329218334Speter	 delete the branch and the insn in its delay slot.  */
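
      /* Sketch (illustrative):

		jmp	L1		; delay slot: add r2,r2,1
		add	r2,r2,1		; identical insn being jumped over
	     L1:	...

	 Taken or not, execution performs the add once and arrives at L1,
	 so the jump and its slot copy can both be deleted.  */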
329318334Speter      if (next && GET_CODE (next) == INSN
329418334Speter	  && prev_label (next_active_insn (next)) == target_label
329518334Speter	  && simplejump_p (insn)
329618334Speter	  && XVECLEN (pat, 0) == 2
329718334Speter	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
329818334Speter	{
329918334Speter	  delete_insn (insn);
330018334Speter	  continue;
330118334Speter	}
330218334Speter
330318334Speter      /* See if this jump (with its delay slots) branches around another
330418334Speter	 jump (without delay slots).  If so, invert this jump and point
330518334Speter	 it to the target of the second jump.  We cannot do this for
330618334Speter	 annulled jumps, though.  Again, don't convert a jump to a RETURN
330718334Speter	 here.  */
330818334Speter      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
330918334Speter	  && next && GET_CODE (next) == JUMP_INSN
331018334Speter	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
331118334Speter	  && next_active_insn (target_label) == next_active_insn (next)
331218334Speter	  && no_labels_between_p (insn, next))
331318334Speter	{
331418334Speter	  rtx label = JUMP_LABEL (next);
331518334Speter	  rtx old_label = JUMP_LABEL (delay_insn);
331618334Speter
331718334Speter	  if (label == 0)
331818334Speter	    label = find_end_label ();
331918334Speter
332018334Speter	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
332118334Speter	    {
332218334Speter	      /* Be careful how we do this to avoid deleting code or labels
332318334Speter		 that are momentarily dead.  See similar optimization in
332418334Speter		 jump.c  */
332518334Speter	      if (old_label)
332618334Speter		++LABEL_NUSES (old_label);
332718334Speter
332818334Speter	      if (invert_jump (delay_insn, label))
332918334Speter		{
333018334Speter		  int i;
333118334Speter
333218334Speter		  /* Must update the INSN_FROM_TARGET_P bits now that
333318334Speter		     the branch is reversed, so that mark_target_live_regs
333418334Speter		     will handle the delay slot insn correctly.  */
333518334Speter		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
333618334Speter		    {
333718334Speter		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
333818334Speter		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
333918334Speter		    }
334018334Speter
334118334Speter		  delete_insn (next);
334218334Speter		  next = insn;
334318334Speter		}
334418334Speter
334518334Speter	      if (old_label && --LABEL_NUSES (old_label) == 0)
334618334Speter		delete_insn (old_label);
334718334Speter	      continue;
334818334Speter	    }
334918334Speter	}
335018334Speter
335118334Speter      /* If we own the thread opposite the way this insn branches, see if we
335218334Speter	 can merge its delay slots with following insns.  */
335318334Speter      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
335418334Speter	  && own_thread_p (NEXT_INSN (insn), 0, 1))
335518334Speter	try_merge_delay_insns (insn, next);
335618334Speter      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
335718334Speter	       && own_thread_p (target_label, target_label, 0))
335818334Speter	try_merge_delay_insns (insn, next_active_insn (target_label));
335918334Speter
336018334Speter      /* If we get here, we haven't deleted INSN.  But we may have deleted
336118334Speter	 NEXT, so recompute it.  */
336218334Speter      next = next_active_insn (insn);
336318334Speter    }
336418334Speter}
336518334Speter
336618334Speter#ifdef HAVE_return
336718334Speter
336818334Speter/* Look for filled jumps to the end of function label.  We can try to convert
336918334Speter   them into RETURN insns if the insns in the delay slot are valid for the
337018334Speter   RETURN as well.  */
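
/* Sketch (illustrative):

	jmp	end_of_function_label	; delay slot: mov r2,r1
	 mov	r2,r1
   ...
   end_of_function_label:
	ret

   becomes, when the slot insn is also valid for a RETURN,

	ret
	 mov	r2,r1			; delay slot kept  */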
337118334Speter
337218334Speterstatic void
337318334Spetermake_return_insns (first)
337418334Speter     rtx first;
337518334Speter{
337618334Speter  rtx insn, jump_insn, pat;
337718334Speter  rtx real_return_label = end_of_function_label;
337818334Speter  int slots, i;
337918334Speter
338018334Speter  /* See if there is a RETURN insn in the function other than the one we
338118334Speter     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
338218334Speter     into a RETURN to jump to it.  */
338318334Speter  for (insn = first; insn; insn = NEXT_INSN (insn))
338418334Speter    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
338518334Speter      {
338618334Speter	real_return_label = get_label_before (insn);
338718334Speter	break;
338818334Speter      }
338918334Speter
339018334Speter  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
339118334Speter     was equal to END_OF_FUNCTION_LABEL.  */
339218334Speter  LABEL_NUSES (real_return_label)++;
339318334Speter
339418334Speter  /* Clear the list of insns to fill so we can use it.  */
339518334Speter  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
339618334Speter
339718334Speter  for (insn = first; insn; insn = NEXT_INSN (insn))
339818334Speter    {
339918334Speter      int flags;
340018334Speter
340118334Speter      /* Only look at filled JUMP_INSNs that go to the end of function
340218334Speter	 label.  */
340318334Speter      if (GET_CODE (insn) != INSN
340418334Speter	  || GET_CODE (PATTERN (insn)) != SEQUENCE
340518334Speter	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
340618334Speter	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
340718334Speter	continue;
340818334Speter
340918334Speter      pat = PATTERN (insn);
341018334Speter      jump_insn = XVECEXP (pat, 0, 0);
341118334Speter
341218334Speter      /* If we can't make the jump into a RETURN, try to redirect it to the best
341318334Speter	 RETURN and go on to the next insn.  */
341418334Speter      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
341518334Speter	{
341618334Speter	  /* Make sure redirecting the jump will not invalidate the delay
341718334Speter	     slot insns.  */
341818334Speter	  if (redirect_with_delay_slots_safe_p (jump_insn,
341918334Speter						real_return_label,
342018334Speter						insn))
342118334Speter	    reorg_redirect_jump (jump_insn, real_return_label);
342218334Speter	  continue;
342318334Speter	}
342418334Speter
      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has at least as many slots and the contents of each
	 are valid.  */
342818334Speter
342918334Speter      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
343018334Speter      slots = num_delay_slots (jump_insn);
343118334Speter      if (slots >= XVECLEN (pat, 0) - 1)
343218334Speter	{
343318334Speter	  for (i = 1; i < XVECLEN (pat, 0); i++)
343418334Speter	    if (! (
343518334Speter#ifdef ANNUL_IFFALSE_SLOTS
343618334Speter		   (INSN_ANNULLED_BRANCH_P (jump_insn)
343718334Speter		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
343818334Speter		   ? eligible_for_annul_false (jump_insn, i - 1,
343918334Speter					       XVECEXP (pat, 0, i), flags) :
344018334Speter#endif
344118334Speter#ifdef ANNUL_IFTRUE_SLOTS
344218334Speter		   (INSN_ANNULLED_BRANCH_P (jump_insn)
344318334Speter		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
344418334Speter		   ? eligible_for_annul_true (jump_insn, i - 1,
344518334Speter					      XVECEXP (pat, 0, i), flags) :
344618334Speter#endif
		   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
344818334Speter	      break;
344918334Speter	}
345018334Speter      else
345118334Speter	i = 0;
345218334Speter
345318334Speter      if (i == XVECLEN (pat, 0))
345418334Speter	continue;
345518334Speter
345618334Speter      /* We have to do something with this insn.  If it is an unconditional
345718334Speter	 RETURN, delete the SEQUENCE and output the individual insns,
345818334Speter	 followed by the RETURN.  Then set things up so we try to find
345918334Speter	 insns for its delay slots, if it needs some.  */
346018334Speter      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
346118334Speter	{
346218334Speter	  rtx prev = PREV_INSN (insn);
346318334Speter
346418334Speter	  delete_insn (insn);
346518334Speter	  for (i = 1; i < XVECLEN (pat, 0); i++)
346618334Speter	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
346718334Speter
346818334Speter	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
346918334Speter	  emit_barrier_after (insn);
347018334Speter
347118334Speter	  if (slots)
347218334Speter	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
347318334Speter	}
347418334Speter      else
347518334Speter	/* It is probably more efficient to keep this with its current
347618334Speter	   delay slot as a branch to a RETURN.  */
347718334Speter	reorg_redirect_jump (jump_insn, real_return_label);
347818334Speter    }
347918334Speter
348018334Speter  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
348118334Speter     new delay slots we have created.  */
348218334Speter  if (--LABEL_NUSES (real_return_label) == 0)
348318334Speter    delete_insn (real_return_label);
348418334Speter
348550397Sobrien  fill_simple_delay_slots (1);
348650397Sobrien  fill_simple_delay_slots (0);
348718334Speter}
348818334Speter#endif
348918334Speter
349018334Speter/* Try to find insns to place in delay slots.  */
349118334Speter
349218334Spetervoid
349318334Speterdbr_schedule (first, file)
349418334Speter     rtx first;
349518334Speter     FILE *file;
349618334Speter{
349718334Speter  rtx insn, next, epilogue_insn = 0;
349818334Speter  int i;
349918334Speter#if 0
350018334Speter  int old_flag_no_peephole = flag_no_peephole;
350118334Speter
350218334Speter  /* Execute `final' once in prescan mode to delete any insns that won't be
350318334Speter     used.  Don't let final try to do any peephole optimization--it will
350418334Speter     ruin dataflow information for this pass.  */
350518334Speter
350618334Speter  flag_no_peephole = 1;
350718334Speter  final (first, 0, NO_DEBUG, 1, 1);
350818334Speter  flag_no_peephole = old_flag_no_peephole;
350918334Speter#endif
351018334Speter
351118334Speter  /* If the current function has no insns other than the prologue and
351218334Speter     epilogue, then do not try to fill any delay slots.  */
351318334Speter  if (n_basic_blocks == 0)
351418334Speter    return;
351518334Speter
351618334Speter  /* Find the highest INSN_UID and allocate and initialize our map from
351718334Speter     INSN_UID's to position in code.  */
351818334Speter  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
351918334Speter    {
352018334Speter      if (INSN_UID (insn) > max_uid)
352118334Speter	max_uid = INSN_UID (insn);
352218334Speter      if (GET_CODE (insn) == NOTE
352318334Speter	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
352418334Speter	epilogue_insn = insn;
352518334Speter    }
352618334Speter
352752284Sobrien  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
352818334Speter  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
352918334Speter    uid_to_ruid[INSN_UID (insn)] = i;
353018334Speter
353118334Speter  /* Initialize the list of insns that need filling.  */
353218334Speter  if (unfilled_firstobj == 0)
353318334Speter    {
353418334Speter      gcc_obstack_init (&unfilled_slots_obstack);
353518334Speter      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
353618334Speter    }
353718334Speter
353818334Speter  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
353918334Speter    {
354018334Speter      rtx target;
354118334Speter
354218334Speter      INSN_ANNULLED_BRANCH_P (insn) = 0;
354318334Speter      INSN_FROM_TARGET_P (insn) = 0;
354418334Speter
354518334Speter      /* Skip vector tables.  We can't get attributes for them.  */
354618334Speter      if (GET_CODE (insn) == JUMP_INSN
354718334Speter	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
354818334Speter	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
354918334Speter	continue;
355018334Speter
355118334Speter      if (num_delay_slots (insn) > 0)
355218334Speter	obstack_ptr_grow (&unfilled_slots_obstack, insn);
355318334Speter
355418334Speter      /* Ensure all jumps go to the last of a set of consecutive labels.  */
355518334Speter      if (GET_CODE (insn) == JUMP_INSN
355618334Speter	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
355718334Speter	  && JUMP_LABEL (insn) != 0
355818334Speter	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
355918334Speter	      != JUMP_LABEL (insn)))
356018334Speter	redirect_jump (insn, target);
356118334Speter    }
356218334Speter
356352284Sobrien  init_resource_info (epilogue_insn);
356418334Speter
356518334Speter  /* Show we haven't computed an end-of-function label yet.  */
356618334Speter  end_of_function_label = 0;
356718334Speter
356818334Speter  /* Initialize the statistics for this function.  */
356918334Speter  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
357018334Speter  bzero ((char *) num_filled_delays, sizeof num_filled_delays);
357118334Speter
357218334Speter  /* Now do the delay slot filling.  Try everything twice in case earlier
357318334Speter     changes make more slots fillable.  */
357418334Speter
357518334Speter  for (reorg_pass_number = 0;
357618334Speter       reorg_pass_number < MAX_REORG_PASSES;
357718334Speter       reorg_pass_number++)
357818334Speter    {
357950397Sobrien      fill_simple_delay_slots (1);
358050397Sobrien      fill_simple_delay_slots (0);
358150397Sobrien      fill_eager_delay_slots ();
358218334Speter      relax_delay_slots (first);
358318334Speter    }
358418334Speter
358518334Speter  /* Delete any USE insns made by update_block; subsequent passes don't need
358618334Speter     them or know how to deal with them.  */
358718334Speter  for (insn = first; insn; insn = next)
358818334Speter    {
358918334Speter      next = NEXT_INSN (insn);
359018334Speter
359118334Speter      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
359218334Speter	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
359318334Speter	next = delete_insn (insn);
359418334Speter    }
359518334Speter
359618334Speter  /* If we made an end of function label, indicate that it is now
359718334Speter     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
359818334Speter     If it is now unused, delete it.  */
359918334Speter  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
360018334Speter    delete_insn (end_of_function_label);
360118334Speter
360218334Speter#ifdef HAVE_return
360318334Speter  if (HAVE_return && end_of_function_label != 0)
360418334Speter    make_return_insns (first);
360518334Speter#endif
360618334Speter
360718334Speter  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
360818334Speter
360918334Speter  /* It is not clear why the line below is needed, but it does seem to be.  */
361018334Speter  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
361118334Speter
361218334Speter  /* Reposition the prologue and epilogue notes in case we moved the
361318334Speter     prologue/epilogue insns.  */
361418334Speter  reposition_prologue_and_epilogue_notes (first);
361518334Speter
361618334Speter  if (file)
361718334Speter    {
361818334Speter      register int i, j, need_comma;
361918334Speter
362018334Speter      for (reorg_pass_number = 0;
362118334Speter	   reorg_pass_number < MAX_REORG_PASSES;
362218334Speter	   reorg_pass_number++)
362318334Speter	{
362418334Speter	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
362518334Speter	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
362618334Speter	    {
362718334Speter	      need_comma = 0;
362818334Speter	      fprintf (file, ";; Reorg function #%d\n", i);
362918334Speter
363018334Speter	      fprintf (file, ";; %d insns needing delay slots\n;; ",
363118334Speter		       num_insns_needing_delays[i][reorg_pass_number]);
363218334Speter
363318334Speter	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
363418334Speter		if (num_filled_delays[i][j][reorg_pass_number])
363518334Speter		  {
363618334Speter		    if (need_comma)
363718334Speter		      fprintf (file, ", ");
363818334Speter		    need_comma = 1;
363918334Speter		    fprintf (file, "%d got %d delays",
364018334Speter			     num_filled_delays[i][j][reorg_pass_number], j);
364118334Speter		  }
364218334Speter	      fprintf (file, "\n");
364318334Speter	    }
364418334Speter	}
364518334Speter    }
364650397Sobrien
364750397Sobrien  /* For all JUMP insns, fill in branch prediction notes, so that during
364850397Sobrien     assembler output a target can set branch prediction bits in the code.
364950397Sobrien     We have to do this now, as up until this point the destinations of
365050397Sobrien     JUMPS can be moved around and changed, but past right here that cannot
365150397Sobrien     happen.  */
365250397Sobrien  for (insn = first; insn; insn = NEXT_INSN (insn))
365350397Sobrien    {
365450397Sobrien      int pred_flags;
365550397Sobrien
365652284Sobrien      if (GET_CODE (insn) == INSN)
365752284Sobrien        {
365852284Sobrien	  rtx pat = PATTERN (insn);
365952284Sobrien
366052284Sobrien	  if (GET_CODE (pat) == SEQUENCE)
366152284Sobrien	    insn = XVECEXP (pat, 0, 0);
366252284Sobrien	}
366350397Sobrien      if (GET_CODE (insn) != JUMP_INSN)
366450397Sobrien	continue;
366550397Sobrien
366650397Sobrien      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
366750397Sobrien      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
366850397Sobrien					    GEN_INT (pred_flags),
366950397Sobrien					    REG_NOTES (insn));
367050397Sobrien    }
367152284Sobrien  free_resource_info ();
367218334Speter}
367318334Speter#endif /* DELAY_SLOTS */