/* cfgbuild.c, revision 132718.  */
/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks
     - Local CFG construction
         find_sub_basic_blocks  */
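
/* Illustrative usage sketch (not part of the original sources): a pass
   that has rebuilt the insn chain wholesale might reconstruct the CFG
   roughly as

       find_basic_blocks (get_insns (), max_reg_num (), NULL);
       cleanup_cfg (CLEANUP_EXPENSIVE);

   while a pass that merely emitted new control flow insns inside
   existing blocks would mark those blocks in an sbitmap and call
   find_many_sub_basic_blocks instead.  Caller conventions differ from
   pass to pass; this is only a sketch.  */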

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"

static int count_basic_blocks (rtx);
static void find_basic_blocks_1 (rtx);
static rtx find_label_refs (rtx, rtx);
static void make_edges (rtx, basic_block, basic_block, int);
static void make_label_edge (sbitmap *, basic_block, rtx, int);
static void make_eh_edge (sbitmap *, basic_block, rtx);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
	      || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
		  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      abort ();
    }
}

/* Return true if INSN may cause a control flow transfer, in which case
   it should be last in its basic block.  */

bool
control_flow_insn_p (rtx insn)
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for
	 tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
	      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* A call insn may return to a nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
	       && (0 == (note = find_reg_note (insn, REG_EH_REGION,
					       NULL_RTX))
		   || INTVAL (XEXP (note, 0)) >= 0))
	      /* Or it may trap.  */
	      || can_throw_internal (insn));

    case INSN:
      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsensical to reach a barrier when looking for the end
	 of a basic block, but this may happen before dead code is
	 eliminated.  */
      return false;

    default:
      abort ();
    }
}
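
/* Illustrative example (a hypothetical insn stream, not taken from a
   real dump): in the sequence

       code_label L10
       insn       (set (reg) (plus ...))
       call_insn  (call foo)
       jump_insn  (set (pc) (label_ref 20))
       barrier

   inside_basic_block_p accepts the label, the arithmetic insn, the call
   and the jump, but not the barrier.  control_flow_insn_p is true for
   the jump unconditionally, and for the call only when it may throw
   internally or nonlocal goto handler labels exist; each such insn
   terminates its basic block.  */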

/* Count the basic blocks of the function.  */
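
/* For example (illustrative): an insn stream of the shape
   "label, insn, jump, label, insn" yields a count of two -- the jump
   terminates the first block, and the trailing run of insns is counted
   after the scan finishes.  */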

static int
count_basic_blocks (rtx f)
{
  int count = 0;
  bool saw_insn = false;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* A code label or barrier causes the current basic block to be
         terminated at the previous real insn.  */
      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && saw_insn)
	count++, saw_insn = false;

      /* Start a basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
	saw_insn = true;

      /* A control flow insn causes the current basic block to be
	 terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
	count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit more smoothly when we don't
     have to check for the edge case of do-nothing functions with no
     basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}

/* Scan a list of insns for labels referred to other than by jumps.
   This is used to scan the alternatives of a call placeholder.  */
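
/* Illustrative example (hypothetical RTL, for exposition only): an insn
   such as

       (insn 12 11 13 (set (reg:SI 100) (label_ref 42)) ...)

   carries a REG_LABEL note for label 42.  Unless that label heads a
   jump table, or the very next insn is a jump that also refers to the
   label, the label is added to the returned list as a potential
   computed jump target.  */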

static rtx
find_label_refs (rtx f, rtx lvl)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
      {
	rtx note;

	/* Make a list of all labels referred to other than by jumps
	   (which just don't have the REG_LABEL notes).

	   Make a special exception for labels followed by an ADDR*VEC,
	   as this would be a part of the tablejump setup code.

	   Make a special exception for registers loaded with label
	   values just before jump insns that use them.  */

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_LABEL)
	    {
	      rtx lab = XEXP (note, 0), next;

	      if ((next = next_nonnote_insn (lab)) != NULL
		  && GET_CODE (next) == JUMP_INSN
		  && (GET_CODE (PATTERN (next)) == ADDR_VEC
		      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		;
	      else if (GET_CODE (lab) == NOTE)
		;
	      else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		       && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		;
	      else
		lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	    }
      }

  return lvl;
}

/* Create an edge from basic block SRC to the basic block containing
   LABEL.  FLAGS are auxiliary information about the edge that is
   accumulated between calls.  */

static void
make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error for which a diagnostic has already
     been printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the exception handling edges generated by INSN, which lives in
   basic block SRC.  */

static void
make_eh_edge (sbitmap *edge_cache, basic_block src, rtx insn)
{
  int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
		     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}

/* Identify the edges between basic blocks MIN to MAX.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken; a block
   that ends in a computed jump gets an edge to each label on this list
   (and to each forced label), since it could otherwise appear unreachable.

   If UPDATE_P is nonzero, the blocks between MIN and MAX already carry
   edges; those edges are preloaded into the edge cache so that duplicates
   are not created when the blocks are rescanned.  */
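
/* For instance (taken from the callers below), a full rebuild uses

       make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb,
		   EXIT_BLOCK_PTR->prev_bb, 0);

   while find_many_sub_basic_blocks rescans a sub-range with
   UPDATE_P == 1 so that edges already attached to the original blocks
   are not duplicated.  */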

static void
make_edges (rtx label_value_list, basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
    {
      edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
      sbitmap_vector_zero (edge_cache, last_basic_block);

      if (update_p)
        FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
	  {
	    edge e;

	    for (e = bb->succ; e; e = e->succ_next)
	      if (e->dest != EXIT_BLOCK_PTR)
		SET_BIT (edge_cache[bb->index], e->dest->index);
	  }
    }

  /* By the nature of the way blocks get numbered, ENTRY_BLOCK_PTR->next_bb
     is always the first block of the function; if MIN is that block, add
     the fallthru edge from the entry block.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
		      EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      if (GET_CODE (BB_HEAD (bb)) == CODE_LABEL
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* Recognize exception handling placeholders.  */
	  if (GET_CODE (PATTERN (insn)) == RESX)
	    make_eh_edge (edge_cache, bb, insn);

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &tmp))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g. ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* A return insn creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
			  EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.
		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */

	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      while (insn
	     && GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
	{
	  if (force_fallthru || insn == BB_HEAD (bb->next_bb))
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Find all basic blocks of the function whose first insn is F.

   Collect a list of labels whose addresses are taken and store it in
   label_value_list; make_edges uses it to create the edges for computed
   gotos.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx lvl = NULL_RTX;
  rtx trll = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
	  && head)
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      if (inside_basic_block_p (insn))
	{
	  if (head == NULL_RTX)
	    head = insn;
	  end = insn;
	}

      if (head && control_flow_insn_p (insn))
	{
	  prev = create_basic_block_structure (head, end, bb_note, prev);
	  head = end = NULL_RTX;
	  bb_note = NULL_RTX;
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	case JUMP_INSN:
	case INSN:
	case BARRIER:
	  break;

	case CALL_INSN:
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      /* Scan each of the alternatives for label refs.  */
	      lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
	      lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
	      /* Record its tail recursion label, if any.  */
	      if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	    }
	  break;

	default:
	  abort ();
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps.

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for registers loaded with label
	     values just before jump insns that use them.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
			 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  if (last_basic_block != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
  clear_aux_for_blocks ();
}


/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use; NREGS and FILE are currently unused.  */

void
find_basic_blocks (rtx f, int nregs ATTRIBUTE_UNUSED,
		   FILE *file ATTRIBUTE_UNUSED)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
	bb->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = 0;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Discover the edges of our cfg.  */
  make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}

/* State of basic block as seen by find_sub_basic_blocks.  */
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
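
/* Example (illustrative): the functions below tag each block with
   SET_STATE (bb, BLOCK_TO_SPLIT), which stores the enumerator in
   bb->aux by casting it through size_t; STATE (bb) reads it back the
   same way.  The aux field must therefore be free for this use, which
   is why it is cleared again before returning.  */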

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */
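
/* Illustrative example (hypothetical insn stream): if expansion left

       insn        (set (reg) ...)
       call_insn   (call foo)       <- may throw with -fnon-call-exceptions
       insn        (set (reg) ...)
       code_label  L7

   inside one block, the call and the label each force a split here,
   turning the single block into three.  */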

static void
find_bb_boundaries (basic_block bb)
{
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb);
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (GET_CODE (insn) == CODE_LABEL)
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On a code label, split the current basic block.  */
      if (code == CODE_LABEL)
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  if (flow_transfer_insn)
	    BB_END (bb) = flow_transfer_insn;

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	  if (LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR, bb, 0);
	}

      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
	{
	  fallthru = split_block (bb, PREV_INSN (insn));
	  BB_END (bb) = flow_transfer_insn;
	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL_RTX;
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence that ends
     with a return and a barrier, or by some other sequence that does not
     behave like an ordinary jump, we need to take care to move the basic
     block boundary.  */
  if (flow_transfer_insn)
    BB_END (bb) = flow_transfer_insn;

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);
}

/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */
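
/* Worked example (illustrative, assuming the usual REG_BR_PROB_BASE of
   10000): for a two-successor block with count 200 whose final jump
   carries a REG_BR_PROB note of 6000, the branch edge gets probability
   6000 and count (200 * 6000 + 5000) / 10000 == 120, and the fallthru
   edge gets probability 4000 and count 80.  */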

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;

  if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (!note)
	return;

      probability = INTVAL (XEXP (note, 0));
      e = BRANCH_EDGE (b);
      e->probability = probability;
      e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
		  / REG_BR_PROB_BASE);
      f = FALLTHRU_EDGE (b);
      f->probability = REG_BR_PROB_BASE - probability;
      f->count = b->count - e->count;
    }

  if (b->succ && !b->succ->succ_next)
    {
      e = b->succ;
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
    }
}

/* Assume that someone emitted insns with control flow instructions into
   the basic blocks marked in BLOCKS.  Update the CFG data structures to
   match.  */
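
/* Illustrative usage sketch (hypothetical caller, not part of this
   file): a pass that expanded new control flow inside blocks 3 and 7
   might do

       sbitmap blocks = sbitmap_alloc (last_basic_block);
       sbitmap_zero (blocks);
       SET_BIT (blocks, 3);
       SET_BIT (blocks, 7);
       find_many_sub_basic_blocks (blocks);
       sbitmap_free (blocks);

   so that only the marked blocks are rescanned and re-split.  */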

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
	       TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  We expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      edge e;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;
      if (STATE (bb) == BLOCK_NEW)
	{
	  bb->count = 0;
	  bb->frequency = 0;
	  for (e = bb->pred; e; e = e->pred_next)
	    {
	      bb->count += e->count;
	      bb->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (bb);
    }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}

/* Like above, but for a single basic block only.  */

void
find_sub_basic_blocks (basic_block bb)
{
  basic_block min, max, b;
  basic_block next = bb->next_bb;

  min = bb;
  find_bb_boundaries (bb);
  max = next->prev_bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (NULL, min, max, 1);

  /* Update branch probabilities.  We expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
    {
      edge e;

      if (b != min)
	{
	  b->count = 0;
	  b->frequency = 0;
	  for (e = b->pred; e; e = e->pred_next)
	    {
	      b->count += e->count;
	      b->frequency += EDGE_FREQUENCY (e);
	    }
	}

      compute_outgoing_frequencies (b);
    }
}