1/* Calculate branch probabilities, and basic block execution counts.
2   Copyright (C) 1990-2015 Free Software Foundation, Inc.
3   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4   based on some ideas from Dain Samples of UC Berkeley.
5   Further mangling by Bob Manson, Cygnus Support.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3.  If not see
21<http://www.gnu.org/licenses/>.  */
22
/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start), and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block.  We ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we compute a
   minimal spanning tree of the BB graph; only edges that are not on
   the spanning tree (plus the entry point) need instrumenting.  From
   that information all other edge counts can be deduced (see the
   worked example below).  By construction all fake edges must be on
   the spanning tree.  We also attempt to place EDGE_CRITICAL edges on
   the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */
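
/* As a worked example of the spanning tree optimization (an invented
   sketch, not taken from any particular test case): consider the
   diamond-shaped CFG

       ENTRY -> A;  A -> B;  A -> C;  B -> D;  C -> D;  D -> EXIT

   with the usual fake EXIT -> ENTRY edge added.  A possible spanning
   tree is { EXIT->ENTRY, A->B, A->C, B->D, D->EXIT }, so only
   ENTRY->A and C->D receive counters.  If at run time ENTRY->A reads
   100 and C->D reads 30, flow conservation recovers the rest:

       count (A->C) = count (C->D) = 30
       count (A->B) = 100 - 30 = 70
       count (B->D) = count (A->B) = 70
       count (D->EXIT) = 70 + 30 = 100

   i.e. every edge on the tree is deduced from the instrumented edges
   alone.  */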
43
44/* ??? Register allocation should use basic block execution counts to
45   give preference to the most commonly executed blocks.  */
46
47/* ??? Should calculate branch probabilities before instrumenting code, since
48   then we can use arc counts to help decide which arcs to instrument.  */
49
50#include "config.h"
51#include "system.h"
52#include "coretypes.h"
53#include "tm.h"
54#include "rtl.h"
55#include "flags.h"
56#include "regs.h"
57#include "symtab.h"
58#include "hashtab.h"
59#include "hash-set.h"
60#include "vec.h"
61#include "machmode.h"
62#include "hard-reg-set.h"
63#include "input.h"
64#include "function.h"
65#include "statistics.h"
66#include "double-int.h"
67#include "real.h"
68#include "fixed-value.h"
69#include "alias.h"
70#include "wide-int.h"
71#include "inchash.h"
72#include "tree.h"
73#include "insn-config.h"
74#include "expmed.h"
75#include "dojump.h"
76#include "explow.h"
77#include "calls.h"
78#include "emit-rtl.h"
79#include "varasm.h"
80#include "stmt.h"
81#include "expr.h"
82#include "predict.h"
83#include "dominance.h"
84#include "cfg.h"
85#include "cfganal.h"
86#include "basic-block.h"
87#include "diagnostic-core.h"
88#include "coverage.h"
89#include "value-prof.h"
90#include "fold-const.h"
91#include "tree-ssa-alias.h"
92#include "internal-fn.h"
93#include "gimple-expr.h"
94#include "is-a.h"
95#include "gimple.h"
96#include "gimple-iterator.h"
97#include "tree-cfg.h"
98#include "cfgloop.h"
99#include "dumpfile.h"
100#include "hash-map.h"
101#include "plugin-api.h"
102#include "ipa-ref.h"
103#include "cgraph.h"
104
105#include "profile.h"
106
107struct bb_profile_info {
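  /* Set once the execution count of this basic block is known.  */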
108  unsigned int count_valid : 1;
109
110  /* Number of successor and predecessor edges.  */
111  gcov_type succ_count;
112  gcov_type pred_count;
113};
114
115#define BB_INFO(b)  ((struct bb_profile_info *) (b)->aux)
116
117
118/* Counter summary from the last set of coverage counts read.  */
119
120const struct gcov_ctr_summary *profile_info;
121
122/* Counter working set information computed from the current counter
123   summary. Not initialized unless profile_info summary is non-NULL.  */
124static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];
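
/* Read loosely from the code below (not a normative definition): entry
   I of this array describes the set of hottest counters needed to cover
   roughly (I + 1) * 100 / NUM_GCOV_WORKING_SETS percent of the total
   profiled count (sum_all); num_counters is how many counters that
   takes and min_counter is the smallest count among them.  */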
125
126/* Collect statistics on the performance of this pass for the entire source
127   file.  */
128
129static int total_num_blocks;
130static int total_num_edges;
131static int total_num_edges_ignored;
132static int total_num_edges_instrumented;
133static int total_num_blocks_created;
134static int total_num_passes;
135static int total_num_times_called;
136static int total_hist_br_prob[20];
137static int total_num_branches;
138
139/* Helper function to update gcov_working_sets.  */
140
void
add_working_set (gcov_working_set_t *set)
{
  int i = 0;
  for (; i < NUM_GCOV_WORKING_SETS; i++)
    gcov_working_sets[i] = set[i];
}
146
147/* Forward declarations.  */
148static void find_spanning_tree (struct edge_list *);
149
/* Add edge instrumentation code to the entire function.

   EL is the list of edges in the function; instrumentation is added to
   every edge that is neither ignored nor on the spanning tree.  Return
   the number of edges instrumented.  */
154
155static unsigned
156instrument_edges (struct edge_list *el)
157{
158  unsigned num_instr_edges = 0;
159  int num_edges = NUM_EDGES (el);
160  basic_block bb;
161
162  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
163    {
164      edge e;
165      edge_iterator ei;
166
167      FOR_EACH_EDGE (e, ei, bb->succs)
168	{
169	  struct edge_profile_info *inf = EDGE_INFO (e);
170
171	  if (!inf->ignore && !inf->on_tree)
172	    {
173	      gcc_assert (!(e->flags & EDGE_ABNORMAL));
174	      if (dump_file)
175		fprintf (dump_file, "Edge %d to %d instrumented%s\n",
176			 e->src->index, e->dest->index,
177			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
178	      gimple_gen_edge_profiler (num_instr_edges++, e);
179	    }
180	}
181    }
182
183  total_num_blocks_created += num_edges;
184  if (dump_file)
185    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
186  return num_instr_edges;
187}
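
/* Conceptually, the code gimple_gen_edge_profiler inserts on every
   instrumented edge amounts to a simple counter bump along the lines
   of (a simplified sketch with an invented counter name; the exact
   GIMPLE sequence is produced by the profiler hooks in tree-profile.c)

       __gcov_arc_counter[i] = __gcov_arc_counter[i] + 1;

   where I is the NUM_INSTR_EDGES value passed above.  Critical edges
   are split first so there is a block in which to place the increment,
   which is what the "(and split)" note in the dump refers to.  */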
188
189/* Add code to measure histograms for values in list VALUES.  */
190static void
191instrument_values (histogram_values values)
192{
193  unsigned i;
194
195  /* Emit code to generate the histograms before the insns.  */
196
197  for (i = 0; i < values.length (); i++)
198    {
199      histogram_value hist = values[i];
200      unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);
201
202      if (!coverage_counter_alloc (t, hist->n_counters))
203	continue;
204
205      switch (hist->type)
206	{
207	case HIST_TYPE_INTERVAL:
208	  gimple_gen_interval_profiler (hist, t, 0);
209	  break;
210
211	case HIST_TYPE_POW2:
212	  gimple_gen_pow2_profiler (hist, t, 0);
213	  break;
214
215	case HIST_TYPE_SINGLE_VALUE:
216	  gimple_gen_one_value_profiler (hist, t, 0);
217	  break;
218
219	case HIST_TYPE_CONST_DELTA:
220	  gimple_gen_const_delta_profiler (hist, t, 0);
221	  break;
222
223 	case HIST_TYPE_INDIR_CALL:
224 	case HIST_TYPE_INDIR_CALL_TOPN:
225 	  gimple_gen_ic_profiler (hist, t, 0);
226  	  break;
227
228	case HIST_TYPE_AVERAGE:
229	  gimple_gen_average_profiler (hist, t, 0);
230	  break;
231
232	case HIST_TYPE_IOR:
233	  gimple_gen_ior_profiler (hist, t, 0);
234	  break;
235
236  case HIST_TYPE_TIME_PROFILE:
237    {
238      basic_block bb =
239     split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
240      gimple_stmt_iterator gsi = gsi_start_bb (bb);
241
242      gimple_gen_time_profiler (t, 0, gsi);
243      break;
244    }
245
246	default:
247	  gcc_unreachable ();
248	}
249    }
250}
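
/* The counters requested above only become useful on a later
   -fprofile-use / -fbranch-probabilities compile, where value-prof.c
   uses them to guide transformations.  For example (an illustrative
   sketch, not the exact generated code), when the HIST_TYPE_POW2
   histogram shows the divisor of

       r = a % b;

   is nearly always a power of two for unsigned A and B, the operation
   can be rewritten as

       r = (b & (b - 1)) == 0 ? a & (b - 1) : a % b;

   trading the expensive modulo for a masked AND on the hot path.  */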
251
252
/* Compute the working set information from the counter summary in
   profile_info and store it in gcov_working_sets.  */
254
255void
256get_working_sets (void)
257{
258  unsigned ws_ix, pctinc, pct;
259  gcov_working_set_t *ws_info;
260
261  if (!profile_info)
262    return;
263
264  compute_working_sets (profile_info, gcov_working_sets);
265
266  if (dump_file)
267    {
268      fprintf (dump_file, "Counter working sets:\n");
269      /* Multiply the percentage by 100 to avoid float.  */
270      pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
271      for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
272           ws_ix++, pct += pctinc)
273        {
274          if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
275            pct = 9990;
276          ws_info = &gcov_working_sets[ws_ix];
          /* Print out the percentage using integer arithmetic to avoid float.  */
278          fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
279                   "%"PRId64 "\n",
280                   pct / 100, pct - (pct / 100 * 100),
281                   ws_info->num_counters,
282                   (int64_t)ws_info->min_counter);
283        }
284    }
285}
286
/* Given the desired percentage of the full profile (sum_all from the
   summary), multiplied by 10 to avoid float in PCT_TIMES_10, return
   the corresponding working set information.  If an exact match for
   the percentage isn't found, the closest value is used.  */
291
292gcov_working_set_t *
293find_working_set (unsigned pct_times_10)
294{
295  unsigned i;
296  if (!profile_info)
297    return NULL;
298  gcc_assert (pct_times_10 <= 1000);
299  if (pct_times_10 >= 999)
300    return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
301  i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
302  if (!i)
303    return &gcov_working_sets[0];
304  return &gcov_working_sets[i - 1];
305}
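
/* A minimal usage sketch (hypothetical caller; the real consumers live
   in the hot/cold heuristics elsewhere in the compiler):

       gcov_working_set_t *ws = find_working_set (999);
       gcov_type hot_threshold = ws ? ws->min_counter : 0;

   i.e. the smallest counter still inside the working set covering
   99.9% of sum_all can serve as a "hot count" cutoff.  */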
306
/* Read the arc execution counts for the current function from the
   .gcda file, returning them as an array (or NULL if none are
   available), and set up profile_info.

   CFG_CHECKSUM and LINENO_CHECKSUM are the precomputed checksums
   used to validate the profile data.  */
310
311static gcov_type *
312get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
313{
314  unsigned num_edges = 0;
315  basic_block bb;
316  gcov_type *counts;
317
318  /* Count the edges to be (possibly) instrumented.  */
319  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
320    {
321      edge e;
322      edge_iterator ei;
323
324      FOR_EACH_EDGE (e, ei, bb->succs)
325	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
326	  num_edges++;
327    }
328
329  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
330				lineno_checksum, &profile_info);
331  if (!counts)
332    return NULL;
333
334  get_working_sets ();
335
336  if (dump_file && profile_info)
337    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
338	     profile_info->runs, (unsigned) profile_info->sum_max);
339
340  return counts;
341}
342
343
344static bool
345is_edge_inconsistent (vec<edge, va_gc> *edges)
346{
347  edge e;
348  edge_iterator ei;
349  FOR_EACH_EDGE (e, ei, edges)
350    {
351      if (!EDGE_INFO (e)->ignore)
352        {
353          if (e->count < 0
354	      && (!(e->flags & EDGE_FAKE)
355	          || !block_ends_with_call_p (e->src)))
356	    {
357	      if (dump_file)
358		{
359		  fprintf (dump_file,
360		  	   "Edge %i->%i is inconsistent, count%"PRId64,
361			   e->src->index, e->dest->index, e->count);
362		  dump_bb (dump_file, e->src, 0, TDF_DETAILS);
363		  dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
364		}
365              return true;
366	    }
367        }
368    }
369  return false;
370}
371
372static void
373correct_negative_edge_counts (void)
374{
375  basic_block bb;
376  edge e;
377  edge_iterator ei;
378
379  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
380    {
381      FOR_EACH_EDGE (e, ei, bb->succs)
382        {
383           if (e->count < 0)
384             e->count = 0;
385        }
386    }
387}
388
389/* Check consistency.
390   Return true if inconsistency is found.  */
391static bool
392is_inconsistent (void)
393{
394  basic_block bb;
395  bool inconsistent = false;
396  FOR_EACH_BB_FN (bb, cfun)
397    {
398      inconsistent |= is_edge_inconsistent (bb->preds);
399      if (!dump_file && inconsistent)
400	return true;
401      inconsistent |= is_edge_inconsistent (bb->succs);
402      if (!dump_file && inconsistent)
403	return true;
404      if (bb->count < 0)
405        {
406	  if (dump_file)
407	    {
408	      fprintf (dump_file, "BB %i count is negative "
409		       "%"PRId64,
410		       bb->index,
411		       bb->count);
412	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
413	    }
414	  inconsistent = true;
415	}
416      if (bb->count != sum_edge_counts (bb->preds))
417        {
418	  if (dump_file)
419	    {
420	      fprintf (dump_file, "BB %i count does not match sum of incoming edges "
421		       "%"PRId64" should be %"PRId64,
422		       bb->index,
423		       bb->count,
424		       sum_edge_counts (bb->preds));
425	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
426	    }
427	  inconsistent = true;
428	}
429      if (bb->count != sum_edge_counts (bb->succs) &&
430	  ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
431	     && block_ends_with_call_p (bb)))
432	{
433	  if (dump_file)
434	    {
435	      fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
436		       "%"PRId64" should be %"PRId64,
437		       bb->index,
438		       bb->count,
439		       sum_edge_counts (bb->succs));
440	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
441	    }
442	  inconsistent = true;
443	}
444      if (!dump_file && inconsistent)
445	return true;
446    }
447
448  return inconsistent;
449}
450
/* Set each basic block count to the sum of its outgoing edge counts.  */
452static void
453set_bb_counts (void)
454{
455  basic_block bb;
456  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
457    {
458      bb->count = sum_edge_counts (bb->succs);
459      gcc_assert (bb->count >= 0);
460    }
461}
462
/* Read profile data and return the total number of edge counts read.  */
464static int
465read_profile_edge_counts (gcov_type *exec_counts)
466{
467  basic_block bb;
468  int num_edges = 0;
469  int exec_counts_pos = 0;
  /* For each edge not on the spanning tree, set its execution count from
     the .gcda file.  */
  /* The first count in the .gcda file is the number of times that the
     function was entered.  This is the exec_count for block zero.  */
474
475  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
476    {
477      edge e;
478      edge_iterator ei;
479
480      FOR_EACH_EDGE (e, ei, bb->succs)
481	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
482	  {
483	    num_edges++;
484	    if (exec_counts)
485	      {
486		e->count = exec_counts[exec_counts_pos++];
487		if (e->count > profile_info->sum_max)
488		  {
489		    if (flag_profile_correction)
490		      {
491			static bool informed = 0;
492			if (dump_enabled_p () && !informed)
493		          dump_printf_loc (MSG_NOTE, input_location,
494                                           "corrupted profile info: edge count"
495                                           " exceeds maximal count\n");
496			informed = 1;
497		      }
498		    else
499		      error ("corrupted profile info: edge from %i to %i exceeds maximal count",
500			     bb->index, e->dest->index);
501		  }
502	      }
503	    else
504	      e->count = 0;
505
506	    EDGE_INFO (e)->count_valid = 1;
507	    BB_INFO (bb)->succ_count--;
508	    BB_INFO (e->dest)->pred_count--;
509	    if (dump_file)
510	      {
511		fprintf (dump_file, "\nRead edge from %i to %i, count:",
512			 bb->index, e->dest->index);
513		fprintf (dump_file, "%"PRId64,
514			 (int64_t) e->count);
515	      }
516	  }
517    }
518
  return num_edges;
520}
521
522#define OVERLAP_BASE 10000
523
/* Compare the static estimated profile to the actual profile, and
   return the "degree of overlap" measure between them.

   Degree of overlap is a number between 0 and OVERLAP_BASE.  It is
   the sum of each basic block's minimum relative weight between the
   two profiles.  An overlap of OVERLAP_BASE means the two profiles
   are identical.  */
531
532static int
533compute_frequency_overlap (void)
534{
535  gcov_type count_total = 0, freq_total = 0;
536  int overlap = 0;
537  basic_block bb;
538
539  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
540    {
541      count_total += bb->count;
542      freq_total += bb->frequency;
543    }
544
545  if (count_total == 0 || freq_total == 0)
546    return 0;
547
548  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
549    overlap += MIN (bb->count * OVERLAP_BASE / count_total,
550		    bb->frequency * OVERLAP_BASE / freq_total);
551
552  return overlap;
553}
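
/* A small worked example of the overlap measure, with invented numbers:
   suppose two blocks have profile counts { 90, 10 } and static
   frequencies { 60, 40 }.  Their relative weights are { 0.9, 0.1 }
   versus { 0.6, 0.4 }, so

       overlap = (MIN (0.9, 0.6) + MIN (0.1, 0.4)) * OVERLAP_BASE
               = 0.7 * 10000 = 7000

   which the caller below reports as a 70.0% static profile overlap.  */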
554
555/* Compute the branch probabilities for the various branches.
556   Annotate them accordingly.
557
558   CFG_CHECKSUM is the precomputed checksum for the CFG.  */
559
560static void
561compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
562{
563  basic_block bb;
564  int i;
565  int num_edges = 0;
566  int changes;
567  int passes;
568  int hist_br_prob[20];
569  int num_branches;
570  gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
571  int inconsistent = 0;
572
573  /* Very simple sanity checks so we catch bugs in our profiling code.  */
574  if (!profile_info)
575    return;
576
577  if (profile_info->sum_all < profile_info->sum_max)
578    {
579      error ("corrupted profile info: sum_all is smaller than sum_max");
580      exec_counts = NULL;
581    }
582
583  /* Attach extra info block to each bb.  */
584  alloc_aux_for_blocks (sizeof (struct bb_profile_info));
585  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
586    {
587      edge e;
588      edge_iterator ei;
589
590      FOR_EACH_EDGE (e, ei, bb->succs)
591	if (!EDGE_INFO (e)->ignore)
592	  BB_INFO (bb)->succ_count++;
593      FOR_EACH_EDGE (e, ei, bb->preds)
594	if (!EDGE_INFO (e)->ignore)
595	  BB_INFO (bb)->pred_count++;
596    }
597
598  /* Avoid predicting entry on exit nodes.  */
599  BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
600  BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
601
602  num_edges = read_profile_edge_counts (exec_counts);
603
604  if (dump_file)
605    fprintf (dump_file, "\n%d edge counts read\n", num_edges);
606
607  /* For every block in the file,
608     - if every exit/entrance edge has a known count, then set the block count
609     - if the block count is known, and every exit/entrance edge but one has
610     a known execution count, then set the count of the remaining edge
611
612     As edge counts are set, decrement the succ/pred count, but don't delete
613     the edge, that way we can easily tell when all edges are known, or only
614     one edge is unknown.  */
615
616  /* The order that the basic blocks are iterated through is important.
617     Since the code that finds spanning trees starts with block 0, low numbered
618     edges are put on the spanning tree in preference to high numbered edges.
619     Hence, most instrumented edges are at the end.  Graph solving works much
620     faster if we propagate numbers from the end to the start.
621
622     This takes an average of slightly more than 3 passes.  */
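
  /* For instance (an invented mini-example): if a block has incoming
     edge counts 70 and 30, all known, the first rule sets its count to
     100; if that block then has one outgoing edge known to be 25 and
     exactly one still unknown, the second rule sets the unknown edge
     to 100 - 25 = 75.  Each deduction can unblock neighboring blocks,
     hence the iteration below.  */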
623
624  changes = 1;
625  passes = 0;
626  while (changes)
627    {
628      passes++;
629      changes = 0;
630      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
631	{
632	  struct bb_profile_info *bi = BB_INFO (bb);
633	  if (! bi->count_valid)
634	    {
635	      if (bi->succ_count == 0)
636		{
637		  edge e;
638		  edge_iterator ei;
639		  gcov_type total = 0;
640
641		  FOR_EACH_EDGE (e, ei, bb->succs)
642		    total += e->count;
643		  bb->count = total;
644		  bi->count_valid = 1;
645		  changes = 1;
646		}
647	      else if (bi->pred_count == 0)
648		{
649		  edge e;
650		  edge_iterator ei;
651		  gcov_type total = 0;
652
653		  FOR_EACH_EDGE (e, ei, bb->preds)
654		    total += e->count;
655		  bb->count = total;
656		  bi->count_valid = 1;
657		  changes = 1;
658		}
659	    }
660	  if (bi->count_valid)
661	    {
662	      if (bi->succ_count == 1)
663		{
664		  edge e;
665		  edge_iterator ei;
666		  gcov_type total = 0;
667
668		  /* One of the counts will be invalid, but it is zero,
669		     so adding it in also doesn't hurt.  */
670		  FOR_EACH_EDGE (e, ei, bb->succs)
671		    total += e->count;
672
673		  /* Search for the invalid edge, and set its count.  */
674		  FOR_EACH_EDGE (e, ei, bb->succs)
675		    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
676		      break;
677
678		  /* Calculate count for remaining edge by conservation.  */
679		  total = bb->count - total;
680
681		  gcc_assert (e);
682		  EDGE_INFO (e)->count_valid = 1;
683		  e->count = total;
684		  bi->succ_count--;
685
686		  BB_INFO (e->dest)->pred_count--;
687		  changes = 1;
688		}
689	      if (bi->pred_count == 1)
690		{
691		  edge e;
692		  edge_iterator ei;
693		  gcov_type total = 0;
694
695		  /* One of the counts will be invalid, but it is zero,
696		     so adding it in also doesn't hurt.  */
697		  FOR_EACH_EDGE (e, ei, bb->preds)
698		    total += e->count;
699
700		  /* Search for the invalid edge, and set its count.  */
701		  FOR_EACH_EDGE (e, ei, bb->preds)
702		    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
703		      break;
704
705		  /* Calculate count for remaining edge by conservation.  */
706		  total = bb->count - total + e->count;
707
708		  gcc_assert (e);
709		  EDGE_INFO (e)->count_valid = 1;
710		  e->count = total;
711		  bi->pred_count--;
712
713		  BB_INFO (e->src)->succ_count--;
714		  changes = 1;
715		}
716	    }
717	}
718    }
719  if (dump_file)
720    {
721      int overlap = compute_frequency_overlap ();
722      gimple_dump_cfg (dump_file, dump_flags);
723      fprintf (dump_file, "Static profile overlap: %d.%d%%\n",
724	       overlap / (OVERLAP_BASE / 100),
725	       overlap % (OVERLAP_BASE / 100));
726    }
727
728  total_num_passes += passes;
729  if (dump_file)
730    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
731
732  /* If the graph has been correctly solved, every block will have a
733     succ and pred count of zero.  */
734  FOR_EACH_BB_FN (bb, cfun)
735    {
736      gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
737    }
738
739  /* Check for inconsistent basic block counts */
740  inconsistent = is_inconsistent ();
741
742  if (inconsistent)
743   {
744     if (flag_profile_correction)
745       {
746         /* Inconsistency detected. Make it flow-consistent. */
747         static int informed = 0;
748         if (dump_enabled_p () && informed == 0)
749           {
750             informed = 1;
751             dump_printf_loc (MSG_NOTE, input_location,
752                              "correcting inconsistent profile data\n");
753           }
754         correct_negative_edge_counts ();
755         /* Set bb counts to the sum of the outgoing edge counts */
756         set_bb_counts ();
757         if (dump_file)
758           fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
759         mcf_smooth_cfg ();
760       }
761     else
762       error ("corrupted profile info: profile data is not flow-consistent");
763   }
764
  /* For every edge, calculate its branch probability and record it on
     the edge.  */
767
768  for (i = 0; i < 20; i++)
769    hist_br_prob[i] = 0;
770  num_branches = 0;
771
772  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
773    {
774      edge e;
775      edge_iterator ei;
776
777      if (bb->count < 0)
778	{
779	  error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
780		 bb->index, (int)bb->count);
781	  bb->count = 0;
782	}
783      FOR_EACH_EDGE (e, ei, bb->succs)
784	{
	  /* Function may return twice in the case the called function is
	     setjmp or calls fork, but we can't represent this by extra
	     edge from the entry, since extra edge from the exit is
	     already present.  We get negative frequency from the entry
	     point.  */
790	  if ((e->count < 0
791	       && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
792	      || (e->count > bb->count
793		  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
794	    {
795	      if (block_ends_with_call_p (bb))
796		e->count = e->count < 0 ? 0 : bb->count;
797	    }
798	  if (e->count < 0 || e->count > bb->count)
799	    {
800	      error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
801		     e->src->index, e->dest->index,
802		     (int)e->count);
803	      e->count = bb->count / 2;
804	    }
805	}
806      if (bb->count)
807	{
808	  FOR_EACH_EDGE (e, ei, bb->succs)
809	    e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
810	  if (bb->index >= NUM_FIXED_BLOCKS
811	      && block_ends_with_condjump_p (bb)
812	      && EDGE_COUNT (bb->succs) >= 2)
813	    {
814	      int prob;
815	      edge e;
816	      int index;
817
818	      /* Find the branch edge.  It is possible that we do have fake
819		 edges here.  */
820	      FOR_EACH_EDGE (e, ei, bb->succs)
821		if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
822		  break;
823
824	      prob = e->probability;
825	      index = prob * 20 / REG_BR_PROB_BASE;
826
827	      if (index == 20)
828		index = 19;
829	      hist_br_prob[index]++;
830
831	      num_branches++;
832	    }
833	}
      /* As a last resort, distribute the probabilities evenly.
	 Use the simple heuristic that if there are normal edges,
	 give all abnormal edges a probability of 0, otherwise distribute
	 the probability over the abnormal edges (this is the case of
	 noreturn calls).  */
839      else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
840	{
841	  int total = 0;
842
843	  FOR_EACH_EDGE (e, ei, bb->succs)
844	    if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
845	      total ++;
846	  if (total)
847	    {
848	      FOR_EACH_EDGE (e, ei, bb->succs)
849		if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
850		  e->probability = REG_BR_PROB_BASE / total;
851		else
852		  e->probability = 0;
853	    }
854	  else
855	    {
856	      total += EDGE_COUNT (bb->succs);
857	      FOR_EACH_EDGE (e, ei, bb->succs)
858		e->probability = REG_BR_PROB_BASE / total;
859	    }
860	  if (bb->index >= NUM_FIXED_BLOCKS
861	      && block_ends_with_condjump_p (bb)
862	      && EDGE_COUNT (bb->succs) >= 2)
863	    num_branches++;
864	}
865    }
866  counts_to_freqs ();
867  profile_status_for_fn (cfun) = PROFILE_READ;
868  compute_function_frequency ();
869
870  if (dump_file)
871    {
872      fprintf (dump_file, "%d branches\n", num_branches);
873      if (num_branches)
874	for (i = 0; i < 10; i++)
875	  fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
876		   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
877		   5 * i, 5 * i + 5);
878
879      total_num_branches += num_branches;
880      for (i = 0; i < 20; i++)
881	total_hist_br_prob[i] += hist_br_prob[i];
882
883      fputc ('\n', dump_file);
884      fputc ('\n', dump_file);
885    }
886
887  free_aux_for_blocks ();
888}
889
/* Load the value histograms whose descriptions are stored in the VALUES
   array from the .gcda file.
892
893   CFG_CHECKSUM is the precomputed checksum for the CFG.  */
894
895static void
896compute_value_histograms (histogram_values values, unsigned cfg_checksum,
897                          unsigned lineno_checksum)
898{
899  unsigned i, j, t, any;
900  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
901  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
902  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
903  gcov_type *aact_count;
904  struct cgraph_node *node;
905
906  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
907    n_histogram_counters[t] = 0;
908
909  for (i = 0; i < values.length (); i++)
910    {
911      histogram_value hist = values[i];
912      n_histogram_counters[(int) hist->type] += hist->n_counters;
913    }
914
915  any = 0;
916  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
917    {
918      if (!n_histogram_counters[t])
919	{
920	  histogram_counts[t] = NULL;
921	  continue;
922	}
923
924      histogram_counts[t] =
925	get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
926			     n_histogram_counters[t], cfg_checksum,
927			     lineno_checksum, NULL);
928      if (histogram_counts[t])
929	any = 1;
930      act_count[t] = histogram_counts[t];
931    }
932  if (!any)
933    return;
934
935  for (i = 0; i < values.length (); i++)
936    {
937      histogram_value hist = values[i];
938      gimple stmt = hist->hvalue.stmt;
939
940      t = (int) hist->type;
941
942      aact_count = act_count[t];
943
944      if (act_count[t])
945        act_count[t] += hist->n_counters;
946
947      gimple_add_histogram_value (cfun, stmt, hist);
948      hist->hvalue.counters =  XNEWVEC (gcov_type, hist->n_counters);
949      for (j = 0; j < hist->n_counters; j++)
950        if (aact_count)
951          hist->hvalue.counters[j] = aact_count[j];
952        else
953          hist->hvalue.counters[j] = 0;
954
      /* The time profiler counter is not related to any statement,
         so we have to read the counter and set the value on
         the corresponding call graph node.  */
958      if (hist->type == HIST_TYPE_TIME_PROFILE)
959        {
960	  node = cgraph_node::get (hist->fun->decl);
961	  node->tp_first_run = hist->hvalue.counters[0];
962
963          if (dump_file)
964            fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
965        }
966    }
967
968  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
969    free (histogram_counts[t]);
970}
971
/* When passed NULL as FILE_NAME, reset the static state below.
   When passed something else, output the necessary records to change the
   current file to FILE_NAME and the current line to LINE, opening the
   GCOV_TAG_LINES record for BB (its position is tracked in *OFFSET) if
   it is not already open.  */
975static void
976output_location (char const *file_name, int line,
977		 gcov_position_t *offset, basic_block bb)
978{
979  static char const *prev_file_name;
980  static int prev_line;
981  bool name_differs, line_differs;
982
983  if (!file_name)
984    {
985      prev_file_name = NULL;
986      prev_line = -1;
987      return;
988    }
989
990  name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
991  line_differs = prev_line != line;
992
993  if (name_differs || line_differs)
994    {
995      if (!*offset)
996	{
997	  *offset = gcov_write_tag (GCOV_TAG_LINES);
998	  gcov_write_unsigned (bb->index);
	  name_differs = line_differs = true;
1000	}
1001
      /* If this is a new source file, then output the
	 file's name to the .gcno file.  */
1004      if (name_differs)
1005	{
1006	  prev_file_name = file_name;
1007	  gcov_write_unsigned (0);
1008	  gcov_write_string (prev_file_name);
1009	}
1010      if (line_differs)
1011	{
1012	  gcov_write_unsigned (line);
1013	  prev_line = line;
1014	}
1015     }
1016}
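
/* Taken together with its caller in branch_prob, output_location gives
   each GCOV_TAG_LINES record the shape (an informal summary of the code
   above; gcov-io.h is the authoritative description)

       block index
       { 0, source file name, line numbers ... }   repeated per file
       0, NULL string                              end of record

   all written with gcov_write_unsigned / gcov_write_string and closed
   by gcov_write_length.  */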
1017
/* Instrument and/or analyze program behavior based on the program's CFG.
1019
1020   This function creates a representation of the control flow graph (of
1021   the function being compiled) that is suitable for the instrumentation
1022   of edges and/or converting measured edge counts to counts on the
1023   complete CFG.
1024
1025   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
1026   the flow graph that are needed to reconstruct the dynamic behavior of the
1027   flow graph.  This data is written to the gcno file for gcov.
1028
1029   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
1030   information from the gcda file containing edge count information from
1031   previous executions of the function being compiled.  In this case, the
1032   control flow graph is annotated with actual execution counts by
1033   compute_branch_probabilities().
1034
1035   Main entry point of this file.  */
1036
1037void
1038branch_prob (void)
1039{
1040  basic_block bb;
1041  unsigned i;
1042  unsigned num_edges, ignored_edges;
1043  unsigned num_instrumented;
1044  struct edge_list *el;
1045  histogram_values values = histogram_values ();
1046  unsigned cfg_checksum, lineno_checksum;
1047
1048  total_num_times_called++;
1049
1050  flow_call_edges_add (NULL);
1051  add_noreturn_fake_exit_edges ();
1052
1053  /* We can't handle cyclic regions constructed using abnormal edges.
1054     To avoid these we replace every source of abnormal edge by a fake
1055     edge from entry node and every destination by fake edge to exit.
1056     This keeps graph acyclic and our calculation exact for all normal
1057     edges except for exit and entrance ones.
1058
     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */
1061
1062  FOR_EACH_BB_FN (bb, cfun)
1063    {
1064      int need_exit_edge = 0, need_entry_edge = 0;
1065      int have_exit_edge = 0, have_entry_edge = 0;
1066      edge e;
1067      edge_iterator ei;
1068
1069      /* Functions returning multiple times are not handled by extra edges.
1070         Instead we simply allow negative counts on edges from exit to the
1071         block past call and corresponding probabilities.  We can't go
1072         with the extra edges because that would result in flowgraph that
1073	 needs to have fake edges outside the spanning tree.  */
1074
1075      FOR_EACH_EDGE (e, ei, bb->succs)
1076	{
1077	  gimple_stmt_iterator gsi;
1078	  gimple last = NULL;
1079
1080	  /* It may happen that there are compiler generated statements
1081	     without a locus at all.  Go through the basic block from the
1082	     last to the first statement looking for a locus.  */
1083	  for (gsi = gsi_last_nondebug_bb (bb);
1084	       !gsi_end_p (gsi);
1085	       gsi_prev_nondebug (&gsi))
1086	    {
1087	      last = gsi_stmt (gsi);
1088	      if (gimple_has_location (last))
1089		break;
1090	    }
1091
1092	  /* Edge with goto locus might get wrong coverage info unless
1093	     it is the only edge out of BB.
1094	     Don't do that when the locuses match, so
1095	     if (blah) goto something;
1096	     is not computed twice.  */
1097	  if (last
1098	      && gimple_has_location (last)
1099	      && LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
1100	      && !single_succ_p (bb)
1101	      && (LOCATION_FILE (e->goto_locus)
1102	          != LOCATION_FILE (gimple_location (last))
1103		  || (LOCATION_LINE (e->goto_locus)
1104		      != LOCATION_LINE (gimple_location (last)))))
1105	    {
1106	      basic_block new_bb = split_edge (e);
1107	      edge ne = single_succ_edge (new_bb);
1108	      ne->goto_locus = e->goto_locus;
1109	    }
1110	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1111	       && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1112	    need_exit_edge = 1;
1113	  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1114	    have_exit_edge = 1;
1115	}
1116      FOR_EACH_EDGE (e, ei, bb->preds)
1117	{
1118	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1119	       && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1120	    need_entry_edge = 1;
1121	  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1122	    have_entry_edge = 1;
1123	}
1124
1125      if (need_exit_edge && !have_exit_edge)
1126	{
1127	  if (dump_file)
1128	    fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1129		     bb->index);
1130	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
1131	}
1132      if (need_entry_edge && !have_entry_edge)
1133	{
1134	  if (dump_file)
1135	    fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1136		     bb->index);
1137	  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
1138	  /* Avoid bbs that have both fake entry edge and also some
1139	     exit edge.  One of those edges wouldn't be added to the
1140	     spanning tree, but we can't instrument any of them.  */
1141	  if (have_exit_edge || need_exit_edge)
1142	    {
1143	      gimple_stmt_iterator gsi;
1144	      gimple first;
1145
1146	      gsi = gsi_start_nondebug_after_labels_bb (bb);
1147	      gcc_checking_assert (!gsi_end_p (gsi));
1148	      first = gsi_stmt (gsi);
1149	      /* Don't split the bbs containing __builtin_setjmp_receiver
1150		 or ABNORMAL_DISPATCHER calls.  These are very
1151		 special and don't expect anything to be inserted before
1152		 them.  */
1153	      if (is_gimple_call (first)
1154		  && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
1155		      || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
1156		      || (gimple_call_internal_p (first)
1157			  && (gimple_call_internal_fn (first)
1158			      == IFN_ABNORMAL_DISPATCHER))))
1159		continue;
1160
1161	      if (dump_file)
1162		fprintf (dump_file, "Splitting bb %i after labels\n",
1163			 bb->index);
1164	      split_block_after_labels (bb);
1165	    }
1166	}
1167    }
1168
1169  el = create_edge_list ();
1170  num_edges = NUM_EDGES (el);
1171  alloc_aux_for_edges (sizeof (struct edge_profile_info));
1172
1173  /* The basic blocks are expected to be numbered sequentially.  */
1174  compact_blocks ();
1175
1176  ignored_edges = 0;
1177  for (i = 0 ; i < num_edges ; i++)
1178    {
1179      edge e = INDEX_EDGE (el, i);
1180      e->count = 0;
1181
1182      /* Mark edges we've replaced by fake edges above as ignored.  */
1183      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1184	  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1185	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1186	{
1187	  EDGE_INFO (e)->ignore = 1;
1188	  ignored_edges++;
1189	}
1190    }
1191
  /* Create a spanning tree from the basic block graph, marking each edge
     that is on the spanning tree.  We insert as many abnormal and
     critical edges as possible to minimize the number of edge splits
     necessary.  */
1195
1196  find_spanning_tree (el);
1197
1198  /* Fake edges that are not on the tree will not be instrumented, so
1199     mark them ignored.  */
1200  for (num_instrumented = i = 0; i < num_edges; i++)
1201    {
1202      edge e = INDEX_EDGE (el, i);
1203      struct edge_profile_info *inf = EDGE_INFO (e);
1204
1205      if (inf->ignore || inf->on_tree)
1206	/*NOP*/;
1207      else if (e->flags & EDGE_FAKE)
1208	{
1209	  inf->ignore = 1;
1210	  ignored_edges++;
1211	}
1212      else
1213	num_instrumented++;
1214    }
1215
1216  total_num_blocks += n_basic_blocks_for_fn (cfun);
1217  if (dump_file)
1218    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));
1219
1220  total_num_edges += num_edges;
1221  if (dump_file)
1222    fprintf (dump_file, "%d edges\n", num_edges);
1223
1224  total_num_edges_ignored += ignored_edges;
1225  if (dump_file)
1226    fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1227
1228  total_num_edges_instrumented += num_instrumented;
1229  if (dump_file)
1230    fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);
1231
  /* Compute two different checksums.  Note that we want to compute
     each checksum in only one place, since it depends on the shape
     of the control flow, which can change during
     various transformations.  */
1236  cfg_checksum = coverage_compute_cfg_checksum (cfun);
1237  lineno_checksum = coverage_compute_lineno_checksum ();
1238
1239  /* Write the data from which gcov can reconstruct the basic block
1240     graph and function line numbers (the gcno file).  */
1241  if (coverage_begin_function (lineno_checksum, cfg_checksum))
1242    {
1243      gcov_position_t offset;
1244
1245      /* Basic block flags */
1246      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1247      for (i = 0; i != (unsigned) (n_basic_blocks_for_fn (cfun)); i++)
1248	gcov_write_unsigned (0);
1249      gcov_write_length (offset);
1250
1251      /* Arcs */
1252      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
1253		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1254	{
1255	  edge e;
1256	  edge_iterator ei;
1257
1258	  offset = gcov_write_tag (GCOV_TAG_ARCS);
1259	  gcov_write_unsigned (bb->index);
1260
1261	  FOR_EACH_EDGE (e, ei, bb->succs)
1262	    {
1263	      struct edge_profile_info *i = EDGE_INFO (e);
1264	      if (!i->ignore)
1265		{
1266		  unsigned flag_bits = 0;
1267
1268		  if (i->on_tree)
1269		    flag_bits |= GCOV_ARC_ON_TREE;
1270		  if (e->flags & EDGE_FAKE)
1271		    flag_bits |= GCOV_ARC_FAKE;
1272		  if (e->flags & EDGE_FALLTHRU)
1273		    flag_bits |= GCOV_ARC_FALLTHROUGH;
1274		  /* On trees we don't have fallthru flags, but we can
1275		     recompute them from CFG shape.  */
1276		  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1277		      && e->src->next_bb == e->dest)
1278		    flag_bits |= GCOV_ARC_FALLTHROUGH;
1279
1280		  gcov_write_unsigned (e->dest->index);
1281		  gcov_write_unsigned (flag_bits);
1282	        }
1283	    }
1284
1285	  gcov_write_length (offset);
1286	}
1287
1288      /* Line numbers.  */
1289      /* Initialize the output.  */
1290      output_location (NULL, 0, NULL, NULL);
1291
1292      FOR_EACH_BB_FN (bb, cfun)
1293	{
1294	  gimple_stmt_iterator gsi;
1295	  gcov_position_t offset = 0;
1296
1297	  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
1298	    {
1299	      expanded_location curr_location =
1300		expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1301	      output_location (curr_location.file, curr_location.line,
1302			       &offset, bb);
1303	    }
1304
1305	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1306	    {
1307	      gimple stmt = gsi_stmt (gsi);
1308	      if (gimple_has_location (stmt))
1309		output_location (gimple_filename (stmt), gimple_lineno (stmt),
1310				 &offset, bb);
1311	    }
1312
1313	  /* Notice GOTO expressions eliminated while constructing the CFG.  */
1314	  if (single_succ_p (bb)
1315	      && LOCATION_LOCUS (single_succ_edge (bb)->goto_locus)
1316		 != UNKNOWN_LOCATION)
1317	    {
1318	      expanded_location curr_location
1319		= expand_location (single_succ_edge (bb)->goto_locus);
1320	      output_location (curr_location.file, curr_location.line,
1321			       &offset, bb);
1322	    }
1323
1324	  if (offset)
1325	    {
	      /* A NULL file name indicates the end of the run.  */
1327	      gcov_write_unsigned (0);
1328	      gcov_write_string (NULL);
1329	      gcov_write_length (offset);
1330	    }
1331	}
1332    }
1333
1334  if (flag_profile_values)
1335    gimple_find_values_to_profile (&values);
1336
1337  if (flag_branch_probabilities)
1338    {
1339      compute_branch_probabilities (cfg_checksum, lineno_checksum);
1340      if (flag_profile_values)
1341	compute_value_histograms (values, cfg_checksum, lineno_checksum);
1342    }
1343
1344  remove_fake_edges ();
1345
1346  /* For each edge not on the spanning tree, add counting code.  */
1347  if (profile_arc_flag
1348      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1349    {
1350      unsigned n_instrumented;
1351
1352      gimple_init_edge_profiler ();
1353
1354      n_instrumented = instrument_edges (el);
1355
1356      gcc_assert (n_instrumented == num_instrumented);
1357
1358      if (flag_profile_values)
1359	instrument_values (values);
1360
1361      /* Commit changes done by instrumentation.  */
1362      gsi_commit_edge_inserts ();
1363    }
1364
1365  free_aux_for_edges ();
1366
1367  values.release ();
1368  free_edge_list (el);
1369  coverage_end_function (lineno_checksum, cfg_checksum);
1370}
1371
1372/* Union find algorithm implementation for the basic blocks using
1373   aux fields.  */
1374
1375static basic_block
1376find_group (basic_block bb)
1377{
1378  basic_block group = bb, bb1;
1379
1380  while ((basic_block) group->aux != group)
1381    group = (basic_block) group->aux;
1382
1383  /* Compress path.  */
1384  while ((basic_block) bb->aux != group)
1385    {
1386      bb1 = (basic_block) bb->aux;
1387      bb->aux = (void *) group;
1388      bb = bb1;
1389    }
1390  return group;
1391}
1392
1393static void
1394union_groups (basic_block bb1, basic_block bb2)
1395{
1396  basic_block bb1g = find_group (bb1);
1397  basic_block bb2g = find_group (bb2);
1398
  /* ??? I don't have a place for the rank field.  OK, let's go without
     it; this code is unlikely to be a performance problem anyway.  */
1401  gcc_assert (bb1g != bb2g);
1402
1403  bb1g->aux = bb2g;
1404}
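
/* An illustrative trace of the two helpers above (invented blocks, not
   from the sources): with blocks a, b and c each initially in their own
   group,

       union_groups (a, b);   -- a's representative becomes b
       union_groups (b, c);   -- b's representative becomes c
       find_group (a);        -- walks a -> b -> c, returns c, and path
                                 compression leaves a->aux pointing at c

   so later queries on A are constant time.  */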
1405
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Critical edges are also put on the tree, since
   they are more expensive to instrument.  */
1412
1413static void
1414find_spanning_tree (struct edge_list *el)
1415{
1416  int i;
1417  int num_edges = NUM_EDGES (el);
1418  basic_block bb;
1419
1420  /* We use aux field for standard union-find algorithm.  */
1421  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
1422    bb->aux = bb;
1423
  /* Add the fake exit-to-entry edge, which we can't instrument.  */
1425  union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
1426
  /* First add all abnormal edges to the tree unless they form a cycle.  Also
     add all edges to the exit block, to avoid inserting profiling code after
     the return value has been set.  */
1430  for (i = 0; i < num_edges; i++)
1431    {
1432      edge e = INDEX_EDGE (el, i);
1433      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1434	   || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1435	  && !EDGE_INFO (e)->ignore
1436	  && (find_group (e->src) != find_group (e->dest)))
1437	{
1438	  if (dump_file)
1439	    fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1440		     e->src->index, e->dest->index);
1441	  EDGE_INFO (e)->on_tree = 1;
1442	  union_groups (e->src, e->dest);
1443	}
1444    }
1445
1446  /* Now insert all critical edges to the tree unless they form a cycle.  */
1447  for (i = 0; i < num_edges; i++)
1448    {
1449      edge e = INDEX_EDGE (el, i);
1450      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1451	  && find_group (e->src) != find_group (e->dest))
1452	{
1453	  if (dump_file)
1454	    fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1455		     e->src->index, e->dest->index);
1456	  EDGE_INFO (e)->on_tree = 1;
1457	  union_groups (e->src, e->dest);
1458	}
1459    }
1460
1461  /* And now the rest.  */
1462  for (i = 0; i < num_edges; i++)
1463    {
1464      edge e = INDEX_EDGE (el, i);
1465      if (!EDGE_INFO (e)->ignore
1466	  && find_group (e->src) != find_group (e->dest))
1467	{
1468	  if (dump_file)
1469	    fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1470		     e->src->index, e->dest->index);
1471	  EDGE_INFO (e)->on_tree = 1;
1472	  union_groups (e->src, e->dest);
1473	}
1474    }
1475
1476  clear_aux_for_blocks ();
1477}
1478
1479/* Perform file-level initialization for branch-prob processing.  */
1480
1481void
1482init_branch_prob (void)
1483{
1484  int i;
1485
1486  total_num_blocks = 0;
1487  total_num_edges = 0;
1488  total_num_edges_ignored = 0;
1489  total_num_edges_instrumented = 0;
1490  total_num_blocks_created = 0;
1491  total_num_passes = 0;
1492  total_num_times_called = 0;
1493  total_num_branches = 0;
1494  for (i = 0; i < 20; i++)
1495    total_hist_br_prob[i] = 0;
1496}
1497
/* Perform file-level cleanup after branch-prob processing
   is completed.  */
1500
1501void
1502end_branch_prob (void)
1503{
1504  if (dump_file)
1505    {
1506      fprintf (dump_file, "\n");
1507      fprintf (dump_file, "Total number of blocks: %d\n",
1508	       total_num_blocks);
1509      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1510      fprintf (dump_file, "Total number of ignored edges: %d\n",
1511	       total_num_edges_ignored);
1512      fprintf (dump_file, "Total number of instrumented edges: %d\n",
1513	       total_num_edges_instrumented);
1514      fprintf (dump_file, "Total number of blocks created: %d\n",
1515	       total_num_blocks_created);
1516      fprintf (dump_file, "Total number of graph solution passes: %d\n",
1517	       total_num_passes);
1518      if (total_num_times_called != 0)
1519	fprintf (dump_file, "Average number of graph solution passes: %d\n",
1520		 (total_num_passes + (total_num_times_called  >> 1))
1521		 / total_num_times_called);
1522      fprintf (dump_file, "Total number of branches: %d\n",
1523	       total_num_branches);
1524      if (total_num_branches)
1525	{
1526	  int i;
1527
1528	  for (i = 0; i < 10; i++)
1529	    fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1530		     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1531		     / total_num_branches, 5*i, 5*i+5);
1532	}
1533    }
1534}
1535