/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "cfghooks.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "except.h"
#include "stmt.h"

static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
              || ! JUMP_TABLE_DATA_P (NEXT_INSN (insn)));

    case JUMP_INSN:
    case CALL_INSN:
    case INSN:
    case DEBUG_INSN:
      return true;

    case JUMP_TABLE_DATA:
    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if INSN may cause a control flow transfer, so it should
   be the last insn in the basic block.  */

bool
control_flow_insn_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
    case DEBUG_INSN:
      return false;

    case JUMP_INSN:
      return true;

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic blocks
         (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
           || find_reg_note (insn, REG_NORETURN, 0))
          && GET_CODE (PATTERN (insn)) != COND_EXEC)
        return true;

      /* Call insn may return to the nonlocal goto handler.  */
      if (can_nonlocal_goto (insn))
        return true;
      break;

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
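      /* An unconditional trap has a TRAP_IF pattern whose condition is
         const1_rtx, which is exactly what the test below looks for.  */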
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
          && XEXP (PATTERN (insn), 0) == const1_rtx)
        return true;
      if (!cfun->can_throw_non_call_exceptions)
        return false;
      break;

    case JUMP_TABLE_DATA:
    case BARRIER:
      /* It is nonsense to reach here when looking for the end of a
         basic block, but this may happen before dead code is
         eliminated.  */
      return false;

    default:
      gcc_unreachable ();
    }

  return can_throw_internal (insn);
}


/* Create an edge from basic block SRC to the block containing LABEL.
   FLAGS are auxiliary information about the edge that is accumulated
   between calls.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error for which a diagnostic has already
     been printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the EH edges out of basic block SRC that are required by INSN,
   using EDGE_CACHE to avoid creating duplicates.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);

  if (lp)
    {
      rtx_insn *label = lp->landing_pad;

      /* During initial rtl generation, use the post_landing_pad.  */
      if (label == NULL)
        {
          gcc_assert (lp->post_landing_pad);
          label = label_rtx (lp->post_landing_pad);
        }

      make_label_edge (edge_cache, src, label,
                       EDGE_ABNORMAL | EDGE_EH
                       | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
    }
}
166
167/* States of basic block as seen by find_many_sub_basic_blocks.  */
168enum state {
169  /* Basic blocks created via split_block belong to this state.
170     make_edges will examine these basic blocks to see if we need to
171     create edges going out of them.  */
172  BLOCK_NEW = 0,
173
174  /* Basic blocks that do not need examining belong to this state.
175     These blocks will be left intact.  In particular, make_edges will
176     not create edges going out of these basic blocks.  */
177  BLOCK_ORIGINAL,
178
179  /* Basic blocks that may need splitting (due to a label appearing in
180     the middle, etc) belong to this state.  After splitting them,
181     make_edges will create edges going out of them as needed.  */
182  BLOCK_TO_SPLIT
183};
184
185#define STATE(BB) (enum state) ((size_t) (BB)->aux)
186#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
187
188/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
189#define BLOCK_USED_BY_TABLEJUMP		32
190#define FULL_STATE(BB) ((size_t) (BB)->aux)
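
/* The state is stashed directly in a block's otherwise-unused aux
   field: it holds an enum state value, possibly with
   BLOCK_USED_BY_TABLEJUMP ORed in transiently by
   purge_dead_tablejump_edges.  FULL_STATE reads the raw value
   including that flag, while STATE assumes only the enum part is
   set.  */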

/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating the CFG and zero if
   we are building the CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (!vec_safe_is_empty (forced_labels)
      || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* By the nature of the way blocks get numbered, the block following
     the entry block (ENTRY_BLOCK_PTR->next_bb) is always the first
     real block of the function, so give it a fallthru edge from the
     entry block.  */
  if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx_insn *insn;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
        continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
        {
          bitmap_clear (edge_cache);
          if (update_p)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                  bitmap_set_bit (edge_cache, e->dest->index);
            }
        }

      if (LABEL_P (BB_HEAD (bb))
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
        cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;
          rtx_jump_table_data *table;

          /* Recognize a non-local goto as a branch outside the
             current function.  */
          if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
            ;

          /* Recognize a tablejump and do the right thing.  */
          else if (tablejump_p (insn, NULL, &table))
            {
              rtvec vec = table->get_labels ();
              int j;

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (e.g. ARM) emit a conditional jump that also
                 contains the out-of-range target.  Scan for these and
                 add an edge if necessary.  */
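              /* Such a jump has, roughly, the form
                   (set (pc) (if_then_else COND
                                           (... table dispatch ...)
                                           (label_ref OUT_OF_RANGE)))
                 which is what the single_set test below recognizes.  */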
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 label_ref_label (XEXP (SET_SRC (tmp), 2)), 0);
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              rtx_insn *insn;
              unsigned int i;
              FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
                make_label_edge (edge_cache, bb, insn, EDGE_ABNORMAL);
            }

          /* A return insn creates an edge out to the exit block.  */
          else if (returnjump_p (insn))
            cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);

          /* Recognize asm goto and do the right thing.  */
          else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
            {
              int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
              for (i = 0; i < n; ++i)
                make_label_edge (edge_cache, bb,
                                 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0), 0);
            }

          /* Otherwise, we have a plain conditional or unconditional jump.  */
          else
            {
              gcc_assert (JUMP_LABEL (insn));
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a combined call
         and return, and so we need an edge to the exit block.  No need to
         worry about EH edges, since we wouldn't have created the sibling call
         in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
                          EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling non-call
         exceptions then any insn can reach any of the active handlers.
         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
        {
          /* Add any appropriate EH edges.  */
          rtl_make_eh_edge (edge_cache, bb, insn);

          if (code == CALL_INSN)
            {
              if (can_nonlocal_goto (insn))
                {
                  /* ??? This could be made smarter: in some cases it's
                     possible to tell that certain calls will not do a
                     nonlocal goto.  For example, if the nested functions
                     that do the nonlocal gotos do not have their addresses
                     taken, then only calls to those functions or to other
                     nested functions that use them could possibly do
                     nonlocal gotos.  */
                  for (rtx_insn_list *x = nonlocal_goto_handler_labels;
                       x;
                       x = x->next ())
                    make_label_edge (edge_cache, bb, x->insn (),
                                     EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
                }

              if (flag_tm)
                {
                  rtx note;
                  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
                    if (REG_NOTE_KIND (note) == REG_TM)
                      make_label_edge (edge_cache, bb, XEXP (note, 0),
                                       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
                }
            }
        }

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
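      /* If there is already a fallthru edge to the exit block, the
         block simply falls off the end of the function; clearing INSN
         below keeps that edge rather than searching for a fallthrough
         to the next block.  */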
      if (e && e->flags & EDGE_FALLTHRU)
        insn = NULL;

      while (insn
             && NOTE_P (insn)
             && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
        insn = NEXT_INSN (insn);

      if (!insn)
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
                          EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          if (insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
        }
    }

  if (edge_cache)
    sbitmap_free (edge_cache);
}

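/* Mark the basic block containing LABEL as used by a tablejump, by
   ORing BLOCK_USED_BY_TABLEJUMP into its state.  */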
static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

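/* Remove edges out of BB that the tablejump through TABLE no longer
   justifies: mark each block the jump table or its out-of-range branch
   can reach, then drop every normal successor edge of BB whose
   destination was not marked.  */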
static void
purge_dead_tablejump_edges (basic_block bb, rtx_jump_table_data *table)
{
  rtx_insn *insn = BB_END (bb);
  rtx tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  vec = table->get_labels ();

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g. ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     mark that edge as used as well.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (label_ref_label (XEXP (SET_SRC (tmp), 2)));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
        SET_STATE (e->dest, FULL_STATE (e->dest)
                            & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          remove_edge (e);
          continue;
        }
      ei_next (&ei);
    }
}

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx_insn *insn = BB_HEAD (bb);
  rtx_insn *end = BB_END (bb), *x;
  rtx_jump_table_data *table;
  rtx_insn *flow_transfer_insn = NULL;
  rtx_insn *debug_insn = NULL;
  edge fallthru = NULL;
  bool skip_purge;
  bool seen_note_after_debug = false;

  if (insn == end)
    return;

  if (DEBUG_INSN_P (insn) || DEBUG_INSN_P (end))
    {
      /* Check whether, without debug insns, the insn==end test above
         would have caused us to return immediately, and behave the
         same way even with debug insns.  If we don't do this, debug
         insns could cause us to purge dead edges at different times,
         which could in turn change the cfg and affect codegen
         decisions in subtle but undesirable ways.  */
      while (insn != end && DEBUG_INSN_P (insn))
        insn = NEXT_INSN (insn);
      rtx_insn *e = end;
      while (insn != e && DEBUG_INSN_P (e))
        e = PREV_INSN (e);
      if (insn == e)
        {
          /* If there are debug insns after a single insn that is a
             control flow insn in the block, we'd have left right
             away, but we should clean up the debug insns after the
             control flow insn, because they can't remain in the same
             block.  So, do the debug insn cleaning up, but then bail
             out without purging dead edges as we would if the debug
             insns hadn't been there.  */
          if (e != end && !DEBUG_INSN_P (e) && control_flow_insn_p (e))
            {
              skip_purge = true;
              flow_transfer_insn = e;
              goto clean_up_debug_after_control_flow;
            }
          return;
        }
    }

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == DEBUG_INSN)
        {
          if (flow_transfer_insn && !debug_insn)
            {
              debug_insn = insn;
              seen_note_after_debug = false;
            }
        }
      /* In case we've previously seen an insn that effects a control
         flow transfer, split the block.  */
      else if ((flow_transfer_insn || code == CODE_LABEL)
               && inside_basic_block_p (insn))
        {
          rtx_insn *prev = PREV_INSN (insn);

          /* If the first non-debug inside_basic_block_p insn after a control
             flow transfer is not a label, split the block before the debug
             insn instead of before the non-debug insn, so that the debug
             insns are not lost.  */
          if (debug_insn && code != CODE_LABEL && code != BARRIER)
            {
              prev = PREV_INSN (debug_insn);
              if (seen_note_after_debug)
                {
                  /* Though, if there are NOTEs intermixed with DEBUG_INSNs,
                     move the NOTEs before the DEBUG_INSNs and split after
                     the last NOTE.  */
                  rtx_insn *first = NULL, *last = NULL;
                  for (x = debug_insn; x != insn; x = NEXT_INSN (x))
                    {
                      if (NOTE_P (x))
                        {
                          if (first == NULL)
                            first = x;
                          last = x;
                        }
                      else
                        {
                          gcc_assert (DEBUG_INSN_P (x));
                          if (first)
                            {
                              reorder_insns_nobb (first, last, prev);
                              prev = last;
                              first = last = NULL;
                            }
                        }
                    }
                  if (first)
                    {
                      reorder_insns_nobb (first, last, prev);
                      prev = last;
                    }
                }
            }
          fallthru = split_block (bb, prev);
          if (flow_transfer_insn)
            {
              BB_END (bb) = flow_transfer_insn;

              rtx_insn *next;
              /* Clean up the bb field for the insns between the blocks.  */
              for (x = NEXT_INSN (flow_transfer_insn);
                   x != BB_HEAD (fallthru->dest);
                   x = next)
                {
                  next = NEXT_INSN (x);
                  /* Debug insns should not be in between basic blocks,
                     drop them on the floor.  */
                  if (DEBUG_INSN_P (x))
                    delete_insn (x);
                  else if (!BARRIER_P (x))
                    set_block_for_insn (x, NULL);
                }
            }

          bb = fallthru->dest;
          remove_edge (fallthru);
          /* BB is unreachable at this point - we need to determine its profile
             once edges are built.  */
          bb->count = profile_count::uninitialized ();
          flow_transfer_insn = NULL;
          debug_insn = NULL;
          if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
            make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
        }
      else if (code == BARRIER)
        {
          /* __builtin_unreachable () may cause a barrier to be emitted in
             the middle of a BB.  We need to split it in the same manner as
             if the barrier were preceded by a control_flow_insn_p insn.  */
          if (!flow_transfer_insn)
            flow_transfer_insn = prev_nonnote_nondebug_insn_bb (insn);
          debug_insn = NULL;
        }
      else if (debug_insn)
        {
          if (code == NOTE)
            seen_note_after_debug = true;
          else
            /* Jump tables.  */
            debug_insn = NULL;
        }

      if (control_flow_insn_p (insn))
        flow_transfer_insn = insn;
      if (insn == end)
        break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence that
     terminates with a return and a barrier, or by some other sequence
     that does not behave like an ordinary jump, we need to take care
     and move the basic block boundary.  */
  if (flow_transfer_insn && flow_transfer_insn != end)
    {
      skip_purge = false;

    clean_up_debug_after_control_flow:
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      rtx_insn *next;
      for (x = NEXT_INSN (flow_transfer_insn); ; x = next)
        {
          next = NEXT_INSN (x);
          /* Debug insns should not be in between basic blocks,
             drop them on the floor.  */
          if (DEBUG_INSN_P (x))
            delete_insn (x);
          else if (!BARRIER_P (x))
            set_block_for_insn (x, NULL);
          if (x == end)
            break;
        }

      if (skip_purge)
        return;
    }

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges
     may be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}

/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
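      /* A conditional jump may carry a REG_BR_PROB note giving the
         probability that the branch is taken, in the encoded form
         produced by profile_probability::to_reg_br_prob_note.  Decode
         it for the branch edge and give the fallthru edge the
         complementary probability.  */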
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
        {
          probability = XINT (note, 0);
          e = BRANCH_EDGE (b);
          e->probability
            = profile_probability::from_reg_br_prob_note (probability);
          f = FALLTHRU_EDGE (b);
          f->probability = e->probability.invert ();
          return;
        }
      else
        guess_outgoing_edge_probabilities (b);
    }
  else if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = profile_probability::always ();
      return;
    }
  else
    {
      /* We rely on BBs with more than two successors to have sane
         probabilities and do not guess them here.  For BBs terminated
         by switch statements expanded to jump-table jumps, we did the
         right thing during expansion.  For EH edges, we still guess
         the probabilities here.  */
      bool complex_edge = false;
      FOR_EACH_EDGE (e, ei, b->succs)
        if (e->flags & EDGE_COMPLEX)
          {
            complex_edge = true;
            break;
          }
      if (complex_edge)
        guess_outgoing_edge_probabilities (b);
    }
}

/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;
  bool found = false;
  auto_vec<unsigned int> n_succs;
  n_succs.safe_grow_cleared (last_basic_block_for_fn (cfun));
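
  /* For blocks whose boundaries turn out not to change, n_succs will
     record the number of outgoing edges, so that the probability
     update loop below can recognize unchanged blocks.  */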

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb,
               bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      {
        int n = last_basic_block_for_fn (cfun);
        unsigned int ns = EDGE_COUNT (bb->succs);

        find_bb_boundaries (bb);
        if (n == last_basic_block_for_fn (cfun) && ns == EDGE_COUNT (bb->succs))
          n_succs[bb->index] = EDGE_COUNT (bb->succs);
      }

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) != BLOCK_ORIGINAL)
      {
        found = true;
        break;
      }

  if (!found)
    return;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  We expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
        edge e;
        edge_iterator ei;

        if (STATE (bb) == BLOCK_ORIGINAL)
          continue;
        if (STATE (bb) == BLOCK_NEW)
          {
            bool initialized_src = false, uninitialized_src = false;
            bb->count = profile_count::zero ();
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
                if (e->count ().initialized_p ())
                  {
                    bb->count += e->count ();
                    initialized_src = true;
                  }
                else
                  uninitialized_src = true;
              }
            /* When some edges are missing with a read profile, this is
               most likely because RTL expansion introduced a loop.
               When the profile is guessed, we may have a BB that is
               reachable from an unlikely path as well as from a normal
               path.

               TODO: We should handle loops created during BB expansion
               correctly here.  For now we assume all those loops cycle
               precisely once.  */
            if (!initialized_src
                || (uninitialized_src
                    && profile_status_for_fn (cfun) < PROFILE_GUESSED))
              bb->count = profile_count::uninitialized ();
          }
        /* If nothing changed, there is no need to create new BBs.  */
        else if (EDGE_COUNT (bb->succs) == n_succs[bb->index])
          {
            /* On rare occasions RTL expansion might have mistakenly
               assigned probabilities different from what is in the CFG.
               This happens when we try to split a branch in two but
               optimize out the second branch along the way.  See
               PR81030.  */
            if (JUMP_P (BB_END (bb)) && any_condjump_p (BB_END (bb))
                && EDGE_COUNT (bb->succs) >= 2)
              update_br_prob_note (bb);
            continue;
          }

        compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb, 0);
}