/* Instruction scheduling pass.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com).  Enhanced by,
   and currently maintained by, Jim Wilson (wilson@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "except.h"
#include "recog.h"
#include "cfglayout.h"
#include "params.h"
#include "sched-int.h"
#include "target.h"
#include "output.h"

/* The number of insns scheduled so far.  */
static int sched_n_insns;

/* The number of insns to be scheduled in total.  */
static int n_insns;

/* Set of blocks whose dependencies have already been calculated.  */
static bitmap_head dont_calc_deps;
/* Sets of basic blocks that are ebb heads and tails, respectively.  */
static bitmap_head ebb_head, ebb_tail;

/* Last basic block in the current ebb.  */
static basic_block last_bb;

/* Implementations of the sched_info functions for ebb scheduling.  */
static void init_ready_list (void);
static void begin_schedule_ready (rtx, rtx);
static int schedule_more_p (void);
static const char *ebb_print_insn (rtx, int);
static int rank (rtx, rtx);
static int contributes_to_priority (rtx, rtx);
static void compute_jump_reg_dependencies (rtx, regset, regset, regset);
static basic_block earliest_block_with_similiar_load (basic_block, rtx);
static void add_deps_for_risky_insns (rtx, rtx);
static basic_block schedule_ebb (rtx, rtx);

static void add_remove_insn (rtx, int);
static void add_block1 (basic_block, basic_block);
static basic_block advance_target_bb (basic_block, rtx);
static void fix_recovery_cfg (int, int, int);

#ifdef ENABLE_CHECKING
static int ebb_head_or_leaf_p (basic_block, int);
#endif

/* Return nonzero if there are more insns that should be scheduled.  */

static int
schedule_more_p (void)
{
  return sched_n_insns < n_insns;
}

/* Add all insns that are initially ready to the ready list.  Called
   once before scheduling a set of insns.  */

static void
init_ready_list (void)
{
  int n = 0;
  rtx prev_head = current_sched_info->prev_head;
  rtx next_tail = current_sched_info->next_tail;
  rtx insn;

  sched_n_insns = 0;

#if 0
  /* Print debugging information.  */
  if (sched_verbose >= 5)
    debug_dependencies ();
#endif

  /* Initialize ready list with all 'ready' insns in target block.
     Count number of insns in the target block being scheduled.  */
  for (insn = NEXT_INSN (prev_head); insn != next_tail; insn = NEXT_INSN (insn))
    {
      try_ready (insn);
      n++;
    }

  gcc_assert (n == n_insns);
}

/* INSN is being scheduled after LAST.  Update counters.  */
static void
begin_schedule_ready (rtx insn, rtx last)
{
  sched_n_insns++;

  if (BLOCK_FOR_INSN (insn) == last_bb
      /* INSN is a jump in the last block, ...  */
      && control_flow_insn_p (insn)
      /* that is going to be moved over some instructions.  */
      && last != PREV_INSN (insn))
    {
      edge e;
      edge_iterator ei;
      basic_block bb;

      /* An obscure special case: a partially dead instruction has been
	 scheduled after the last control flow instruction.  In this case
	 we can create a new basic block.  It is always exactly one basic
	 block, last in the sequence.  */

      FOR_EACH_EDGE (e, ei, last_bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

#ifdef ENABLE_CHECKING
      gcc_assert (!e || !(e->flags & EDGE_COMPLEX));

      gcc_assert (BLOCK_FOR_INSN (insn) == last_bb
		  && !IS_SPECULATION_CHECK_P (insn)
		  && BB_HEAD (last_bb) != insn
		  && BB_END (last_bb) == insn);

      {
	rtx x;

	x = NEXT_INSN (insn);
	if (e)
	  gcc_assert (NOTE_P (x) || LABEL_P (x));
	else
	  gcc_assert (BARRIER_P (x));
      }
#endif

      if (e)
	{
	  bb = split_edge (e);
	  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (BB_END (bb)));
	}
      else
	/* Create an empty unreachable block after the INSN.  */
	bb = create_basic_block (NEXT_INSN (insn), NULL_RTX, last_bb);

      /* split_edge () creates BB before E->DEST.  Keep in mind that
	 this operation extends the scheduling region to the end of BB.
	 Hence, we need to shift NEXT_TAIL so that haifa-sched.c won't go
	 outside of the scheduling region.  */
      current_sched_info->next_tail = NEXT_INSN (BB_END (bb));
      gcc_assert (current_sched_info->next_tail);

      add_block (bb, last_bb);
      gcc_assert (last_bb == bb);
    }
}

/* Return a string that contains the insn uid and optionally anything else
   necessary to identify this insn in an output.  It's valid to use a
   static buffer for this.  The ALIGNED parameter should cause the string
   to be formatted so that multiple output lines will line up nicely.  */

static const char *
ebb_print_insn (rtx insn, int aligned ATTRIBUTE_UNUSED)
{
  static char tmp[80];

  sprintf (tmp, "%4d", INSN_UID (insn));
  return tmp;
}

/* Compare priority of two insns.  Return a positive number if the second
   insn is to be preferred for scheduling, and a negative one if the first
   is to be preferred.  Zero if they are equally good.  */

static int
rank (rtx insn1, rtx insn2)
{
  basic_block bb1 = BLOCK_FOR_INSN (insn1);
  basic_block bb2 = BLOCK_FOR_INSN (insn2);

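  /* Prefer the insn that lives in the more frequently executed block:
     returning a negative value here selects INSN1, a positive one INSN2.  */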
  if (bb1->count > bb2->count
      || bb1->frequency > bb2->frequency)
    return -1;
  if (bb1->count < bb2->count
      || bb1->frequency < bb2->frequency)
    return 1;
  return 0;
}

/* NEXT is an instruction that depends on INSN (a backward dependence);
   return nonzero if we should include this dependence in priority
   calculations.  */

static int
contributes_to_priority (rtx next ATTRIBUTE_UNUSED,
			 rtx insn ATTRIBUTE_UNUSED)
{
  return 1;
}

/* INSN is a JUMP_INSN, COND_SET is the set of registers that are
   conditionally set before INSN.  Store the set of registers that
   must be considered as used by this jump in USED and that of
   registers that must be considered as set in SET.  */

static void
compute_jump_reg_dependencies (rtx insn, regset cond_set, regset used,
			       regset set)
{
  basic_block b = BLOCK_FOR_INSN (insn);
  edge e;
  edge_iterator ei;

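  /* glat_start[i] is the set of registers live at the start of the basic
     block with index i, as maintained by the scheduler.  */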
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_FALLTHRU)
      /* The jump may be a by-product of a branch that has been merged
	 into the main codepath after being conditionalized.  Therefore
	 it may keep the fallthrough block from using a value that has
	 conditionally overwritten the one of the main codepath.  So we
	 consider that it restores the value of the main codepath.  */
      bitmap_and (set, glat_start [e->dest->index], cond_set);
    else
      bitmap_ior_into (used, glat_start [e->dest->index]);
}

/* Used in schedule_insns to initialize current_sched_info for scheduling
   regions (or single basic blocks).  */

static struct sched_info ebb_sched_info =
{
  init_ready_list,
  NULL,
  schedule_more_p,
  NULL,
  rank,
  ebb_print_insn,
  contributes_to_priority,
  compute_jump_reg_dependencies,

  NULL, NULL,
  NULL, NULL,
  0, 1, 0,

  add_remove_insn,
  begin_schedule_ready,
  add_block1,
  advance_target_bb,
  fix_recovery_cfg,
#ifdef ENABLE_CHECKING
  ebb_head_or_leaf_p,
#endif
  /* We need DETACH_LIFE_INFO to be able to create new basic blocks.
     See begin_schedule_ready ().  */
  SCHED_EBB | USE_GLAT | DETACH_LIFE_INFO
};

/* Return the earliest block in the ebb currently being processed where a
   "similar load" 'insn2' is found, and hence LOAD_INSN can move
   speculatively into the found block.  All the following must hold:

   (1) both loads have 1 base register (PFREE_CANDIDATEs).
   (2) load_insn and load2 have a def-use dependence upon
   the same insn 'insn1'.

   From all these we can conclude that the two loads access memory
   addresses that differ at most by a constant, and hence if moving
   load_insn would cause an exception, it would have been caused by
   load2 anyhow.

   The function uses the list (given by LAST_BLOCK) of already processed
   blocks in the ebb.  The list is formed in `add_deps_for_risky_insns'.  */

static basic_block
earliest_block_with_similiar_load (basic_block last_block, rtx load_insn)
{
  rtx back_link;
  basic_block bb, earliest_block = NULL;

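  /* In LOG_LINKS and INSN_DEPEND lists the mode of a link encodes the
     dependence kind; VOIDmode stands for a true (def-use) dependence.  */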
  for (back_link = LOG_LINKS (load_insn);
       back_link;
       back_link = XEXP (back_link, 1))
    {
      rtx insn1 = XEXP (back_link, 0);

      if (GET_MODE (back_link) == VOIDmode)
	{
	  /* Found a DEF-USE dependence (insn1, load_insn).  */
	  rtx fore_link;

	  for (fore_link = INSN_DEPEND (insn1);
	       fore_link;
	       fore_link = XEXP (fore_link, 1))
	    {
	      rtx insn2 = XEXP (fore_link, 0);
	      basic_block insn2_block = BLOCK_FOR_INSN (insn2);

	      if (GET_MODE (fore_link) == VOIDmode)
		{
		  if (earliest_block != NULL
		      && earliest_block->index < insn2_block->index)
		    continue;

		  /* Found a DEF-USE dependence (insn1, insn2).  */
		  if (haifa_classify_insn (insn2) != PFREE_CANDIDATE)
		    /* insn2 not guaranteed to be a 1 base reg load.  */
		    continue;

		  for (bb = last_block; bb; bb = bb->aux)
		    if (insn2_block == bb)
		      break;

		  if (!bb)
		    /* insn2 is the similar load.  */
		    earliest_block = insn2_block;
		}
	    }
	}
    }

  return earliest_block;
}

/* The following function adds dependencies between jumps and risky
   insns in the given ebb.  */

static void
add_deps_for_risky_insns (rtx head, rtx tail)
{
  rtx insn, prev;
  int class;
  rtx last_jump = NULL_RTX;
  rtx next_tail = NEXT_INSN (tail);
  basic_block last_block = NULL, bb;

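  /* Walk the ebb, chaining the blocks that end in control flow insns
     through their ->aux fields (most recent block first); LAST_JUMP is
     the most recently seen control flow insn.  */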
  for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
    if (control_flow_insn_p (insn))
      {
	bb = BLOCK_FOR_INSN (insn);
	bb->aux = last_block;
	last_block = bb;
	last_jump = insn;
      }
    else if (INSN_P (insn) && last_jump != NULL_RTX)
      {
	class = haifa_classify_insn (insn);
	prev = last_jump;
	switch (class)
	  {
	  case PFREE_CANDIDATE:
	    if (flag_schedule_speculative_load)
	      {
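		/* The load may move speculatively up to, but not above,
		   the block that already contains a similar load: anchor
		   it to the jump ending the block just before that one.  */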
		bb = earliest_block_with_similiar_load (last_block, insn);
		if (bb)
		  {
		    bb = bb->aux;
		    if (!bb)
		      break;
		    prev = BB_END (bb);
		  }
	      }
	    /* Fall through.  */
	  case TRAP_RISKY:
	  case IRISKY:
	  case PRISKY_CANDIDATE:
	    /* ??? We could implement better checking of PRISKY_CANDIDATEs,
	       analogous to sched-rgn.c.  */
	    /* We cannot change the mode of the backward
	       dependency because REG_DEP_ANTI has the lowest
	       rank.  */
	    if (! sched_insns_conditions_mutex_p (insn, prev))
	      {
		if (!(current_sched_info->flags & DO_SPECULATION))
		  {
		    enum DEPS_ADJUST_RESULT res;

		    res = add_or_update_back_dep (insn, prev,
						  REG_DEP_ANTI, DEP_ANTI);

		    if (res == DEP_CREATED)
		      add_forw_dep (insn, LOG_LINKS (insn));
		    else
		      gcc_assert (res != DEP_CHANGED);
		  }
		else
		  add_or_update_back_forw_dep (insn, prev, REG_DEP_ANTI,
					       set_dep_weak (DEP_ANTI,
							     BEGIN_CONTROL,
							     MAX_DEP_WEAK));
	      }

            break;

          default:
            break;
	  }
      }
  /* Maintain the invariant that bb->aux is clear after use.  */
  while (last_block)
    {
      bb = last_block->aux;
      last_block->aux = NULL;
      last_block = bb;
    }
}

/* Schedule a single extended basic block, defined by the boundaries HEAD
   and TAIL.  Return the last basic block of the scheduled ebb, which can
   differ from the block of TAIL if new blocks were created or the original
   last block was removed during scheduling.  */

static basic_block
schedule_ebb (rtx head, rtx tail)
{
  basic_block first_bb, target_bb;
  struct deps tmp_deps;

  first_bb = BLOCK_FOR_INSN (head);
  last_bb = BLOCK_FOR_INSN (tail);

  if (no_real_insns_p (head, tail))
    return BLOCK_FOR_INSN (tail);

  gcc_assert (INSN_P (head) && INSN_P (tail));

  if (!bitmap_bit_p (&dont_calc_deps, first_bb->index))
    {
      init_deps_global ();

      /* Compute LOG_LINKS.  */
      init_deps (&tmp_deps);
      sched_analyze (&tmp_deps, head, tail);
      free_deps (&tmp_deps);

      /* Compute INSN_DEPEND.  */
      compute_forward_dependences (head, tail);

      add_deps_for_risky_insns (head, tail);

      if (targetm.sched.dependencies_evaluation_hook)
        targetm.sched.dependencies_evaluation_hook (head, tail);

      finish_deps_global ();
    }
  else
    /* Only recovery blocks can have their dependencies already calculated,
       and they are always single-block ebbs.  */
    gcc_assert (first_bb == last_bb);

  /* Set priorities.  */
  current_sched_info->sched_max_insns_priority = 0;
  n_insns = set_priorities (head, tail);
  current_sched_info->sched_max_insns_priority++;

  current_sched_info->prev_head = PREV_INSN (head);
  current_sched_info->next_tail = NEXT_INSN (tail);

  if (write_symbols != NO_DEBUG)
    {
      save_line_notes (first_bb->index, head, tail);
      rm_line_notes (head, tail);
    }

  /* rm_other_notes only removes notes which are _inside_ the
     block---that is, it won't remove notes before the first real insn
     or after the last real insn of the block.  So if the first insn
     has a REG_SAVE_NOTE which would otherwise be emitted before the
     insn, it is redundant with the note before the start of the
     block, and so we have to take it out.  */
  if (INSN_P (head))
    {
      rtx note;

      for (note = REG_NOTES (head); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_SAVE_NOTE)
	  remove_note (head, note);
    }

  /* Remove remaining note insns from the block, save them in
     note_list.  These notes are restored at the end of
     schedule_block ().  */
  rm_other_notes (head, tail);

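  /* Detach the NOTE_INSN_BASIC_BLOCK notes of the blocks inside the ebb
     so that insns can be rearranged across block boundaries while
     scheduling.  */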
  unlink_bb_notes (first_bb, last_bb);

  current_sched_info->queue_must_finish_empty = 1;

  target_bb = first_bb;
  schedule_block (&target_bb, n_insns);

  /* We might pack all instructions into fewer blocks, so we may have made
     some of them empty.  Can't assert (TARGET_BB == LAST_BB).  */

  /* Sanity check: verify that all region insns were scheduled.  */
  gcc_assert (sched_n_insns == n_insns);
  head = current_sched_info->head;
  tail = current_sched_info->tail;

  if (write_symbols != NO_DEBUG)
    restore_line_notes (head, tail);

  if (EDGE_COUNT (last_bb->preds) == 0)
    /* LAST_BB is unreachable.  */
    {
      gcc_assert (first_bb != last_bb
		  && EDGE_COUNT (last_bb->succs) == 0);
      last_bb = last_bb->prev_bb;
      delete_basic_block (last_bb->next_bb);
    }

  return last_bb;
}

/* The one entry point in this file.  */

void
schedule_ebbs (void)
{
  basic_block bb;
  int probability_cutoff;
  rtx tail;
  sbitmap large_region_blocks, blocks;
  int any_large_regions;

  if (profile_info && flag_branch_probabilities)
    probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
  else
    probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
  probability_cutoff = REG_BR_PROB_BASE / 100 * probability_cutoff;

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return;

  /* We need current_sched_info in init_dependency_caches, which is
     invoked via sched_init.  */
  current_sched_info = &ebb_sched_info;

  sched_init ();

  compute_bb_for_insn ();

  /* Initialize DONT_CALC_DEPS and the ebb head and tail markers.  */
  bitmap_initialize (&dont_calc_deps, 0);
  bitmap_clear (&dont_calc_deps);
  bitmap_initialize (&ebb_head, 0);
  bitmap_clear (&ebb_head);
  bitmap_initialize (&ebb_tail, 0);
  bitmap_clear (&ebb_tail);

  /* Schedule every ebb in the function.  */
  FOR_EACH_BB (bb)
    {
      rtx head = BB_HEAD (bb);

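      /* Extend the ebb along fallthru edges for as long as the next block
	 is not the exit block, does not start with a label, and the
	 fallthru edge is likely enough (above PROBABILITY_CUTOFF).  */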
      for (;;)
	{
	  edge e;
	  edge_iterator ei;
	  tail = BB_END (bb);
	  if (bb->next_bb == EXIT_BLOCK_PTR
	      || LABEL_P (BB_HEAD (bb->next_bb)))
	    break;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if ((e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (! e)
	    break;
	  if (e->probability <= probability_cutoff)
	    break;
	  bb = bb->next_bb;
	}

      /* Blah.  We should fix the rest of the code not to get confused by
	 a note or two.  */
      while (head != tail)
	{
	  if (NOTE_P (head))
	    head = NEXT_INSN (head);
	  else if (NOTE_P (tail))
	    tail = PREV_INSN (tail);
	  else if (LABEL_P (head))
	    head = NEXT_INSN (head);
	  else
	    break;
	}

      bitmap_set_bit (&ebb_head, BLOCK_NUM (head));
      bb = schedule_ebb (head, tail);
      bitmap_set_bit (&ebb_tail, bb->index);
    }
  bitmap_clear (&dont_calc_deps);

  gcc_assert (current_sched_info->flags & DETACH_LIFE_INFO);
  /* We can create new basic blocks during scheduling, and
     attach_life_info () will create regsets for them
     (as well as re-attaching the existing info).  */
  attach_life_info ();

  /* Update register live information.  */
  allocate_reg_life_data ();

  any_large_regions = 0;
  large_region_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (large_region_blocks);
  FOR_EACH_BB (bb)
    SET_BIT (large_region_blocks, bb->index);

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);

  /* Update life information.  For regions consisting of multiple blocks
     we've possibly done interblock scheduling that affects global liveness.
     For regions consisting of single blocks we need to do only local
     liveness.  */
  FOR_EACH_BB (bb)
    {
      int bbi;

      bbi = bb->index;

      if (!bitmap_bit_p (&ebb_head, bbi)
	  || !bitmap_bit_p (&ebb_tail, bbi)
	  /* New blocks (e.g. recovery blocks) should be processed
	     as parts of large regions.  */
	  || !glat_start[bbi])
	any_large_regions = 1;
      else
	{
	  SET_BIT (blocks, bbi);
	  RESET_BIT (large_region_blocks, bbi);
	}
    }

  update_life_info (blocks, UPDATE_LIFE_LOCAL, 0);
  sbitmap_free (blocks);

  if (any_large_regions)
    {
      update_life_info (large_region_blocks, UPDATE_LIFE_GLOBAL, 0);

#ifdef ENABLE_CHECKING
      /* !!! We can't check reg_live_info here because destination
	 registers of COND_EXEC's may be dead before scheduling
	 (while they should be live).  Don't know why.  */
      /*check_reg_live (true);*/
#endif
    }
  sbitmap_free (large_region_blocks);

  bitmap_clear (&ebb_head);
  bitmap_clear (&ebb_tail);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  if (reload_completed)
    reposition_prologue_and_epilogue_notes (get_insns ());

  if (write_symbols != NO_DEBUG)
    rm_redundant_line_notes ();

  sched_finish ();
}
689169689Skan
690169689Skan/* INSN has been added to/removed from current ebb.  */
691169689Skanstatic void
692169689Skanadd_remove_insn (rtx insn ATTRIBUTE_UNUSED, int remove_p)
693169689Skan{
694169689Skan  if (!remove_p)
695169689Skan    n_insns++;
696169689Skan  else
697169689Skan    n_insns--;
698169689Skan}
699169689Skan
700169689Skan/* BB was added to ebb after AFTER.  */
701169689Skanstatic void
702169689Skanadd_block1 (basic_block bb, basic_block after)
703169689Skan{
704169689Skan  /* Recovery blocks are always bounded by BARRIERS,
705169689Skan     therefore, they always form single block EBB,
706169689Skan     therefore, we can use rec->index to identify such EBBs.  */
707169689Skan  if (after == EXIT_BLOCK_PTR)
708169689Skan    bitmap_set_bit (&dont_calc_deps, bb->index);
709169689Skan  else if (after == last_bb)
710169689Skan    last_bb = bb;
711169689Skan}
712169689Skan
713169689Skan/* Return next block in ebb chain.  For parameter meaning please refer to
714169689Skan   sched-int.h: struct sched_info: advance_target_bb.  */
715169689Skanstatic basic_block
716169689Skanadvance_target_bb (basic_block bb, rtx insn)
717169689Skan{
718169689Skan  if (insn)
719169689Skan    {
720169689Skan      if (BLOCK_FOR_INSN (insn) != bb
721169689Skan	  && control_flow_insn_p (insn)
722169689Skan	  /* We handle interblock movement of the speculation check
723169689Skan	     or over a speculation check in
724169689Skan	     haifa-sched.c: move_block_after_check ().  */
725169689Skan	  && !IS_SPECULATION_BRANCHY_CHECK_P (insn)
726169689Skan	  && !IS_SPECULATION_BRANCHY_CHECK_P (BB_END (bb)))
727169689Skan	{
728169689Skan	  /* Assert that we don't move jumps across blocks.  */
729169689Skan	  gcc_assert (!control_flow_insn_p (BB_END (bb))
730169689Skan		      && NOTE_INSN_BASIC_BLOCK_P (BB_HEAD (bb->next_bb)));
731169689Skan	  return bb;
732169689Skan	}
733169689Skan      else
734169689Skan	return 0;
735169689Skan    }
736169689Skan  else
737169689Skan    /* Return next non empty block.  */
738169689Skan    {
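      /* A block whose basic block note is also its last insn contains no
	 real insns; skip such empty blocks.  */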
      do
	{
	  gcc_assert (bb != last_bb);

	  bb = bb->next_bb;
	}
      while (bb_note (bb) == BB_END (bb));

      return bb;
    }
}

/* Fix internal data after interblock movement of a jump instruction.
   For parameter meaning please refer to
   sched-int.h: struct sched_info: fix_recovery_cfg.  */
static void
fix_recovery_cfg (int bbi ATTRIBUTE_UNUSED, int jump_bbi, int jump_bb_nexti)
{
  gcc_assert (last_bb->index != bbi);

  if (jump_bb_nexti == last_bb->index)
    last_bb = BASIC_BLOCK (jump_bbi);
}

#ifdef ENABLE_CHECKING
/* Return nonzero if BB is the first or the last (depending on LEAF_P) block
   in the current ebb.  For more information please refer to
   sched-int.h: struct sched_info: region_head_or_leaf_p.  */
static int
ebb_head_or_leaf_p (basic_block bb, int leaf_p)
{
  if (!leaf_p)
    return bitmap_bit_p (&ebb_head, bb->index);
  else
    return bitmap_bit_p (&ebb_tail, bb->index);
}
#endif /* ENABLE_CHECKING  */