1/* Transformations based on profile information for values.
2   Copyright (C) 2003-2022 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "rtl.h"
25#include "tree.h"
26#include "gimple.h"
27#include "cfghooks.h"
28#include "ssa.h"
29#include "cgraph.h"
30#include "coverage.h"
31#include "data-streamer.h"
32#include "diagnostic.h"
33#include "fold-const.h"
34#include "tree-nested.h"
35#include "calls.h"
36#include "expr.h"
37#include "value-prof.h"
38#include "tree-eh.h"
39#include "gimplify.h"
40#include "gimple-iterator.h"
41#include "tree-cfg.h"
42#include "gimple-pretty-print.h"
43#include "dumpfile.h"
44#include "builtins.h"
45
/* This file implements value-profile-based optimizations.  Currently the
   following optimizations are implemented (for more detailed descriptions
   see the comments at value_profile_transformations):

   1) Division/modulo specialization.  If we can determine that the
      operands of a division have some special properties (e.g. that the
      divisor is almost always one particular value), we may use that
      knowledge to produce more efficient code; see the example sketch
      after this list.

   2) Indirect/virtual call specialization.  If we can determine the most
      common callee of an indirect/virtual call, we can use this
      information to improve the code (in particular, it helps the
      inliner).
58
59   3) Speculative prefetching.  If we are able to determine that the difference
60      between addresses accessed by a memory reference is usually constant, we
61      may add the prefetch instructions.
62      FIXME: This transformation was removed together with RTL based value
63      profiling.
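
   As an example of 1), if the profile shows that in

     r = a % b;

   the divisor b is almost always, say, 256, the statement can be rewritten as

     if (b == 256)
       r = a % 256;
     else
       r = a % b;

   so that the common case divides by a compile-time constant.  (This is a
   simplified source-level sketch with a made-up constant; the actual
   transformation works on GIMPLE, see gimple_divmod_fixed_value below.)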
64
65
66   Value profiling internals
67   ==========================
68
69   Every value profiling transformation starts with defining what values
70   to profile.  There are different histogram types (see HIST_TYPE_* in
71   value-prof.h) and each transformation can request one or more histogram
72   types per GIMPLE statement.  The function gimple_find_values_to_profile()
73   collects the values to profile in a vec, and adds the number of counters
74   required for the different histogram types.
75
   For a -fprofile-generate run, the statements for which values should be
   recorded are instrumented in instrument_values().  The instrumentation
   is done by helper functions that can be found in tree-profile.cc, where
   new types of histograms can be added if necessary.

   For a -fprofile-use run, the value profiling data is read back in by
   compute_value_histograms(), which translates the collected data into
   histograms and attaches them to the profiled statements via
   gimple_add_histogram_value().  Histograms are stored in a hash table
   that is attached to every instrumented function; see VALUE_HISTOGRAMS
   in function.h.
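
   For reference, a typical profile-guided build exercising both paths above
   looks roughly like this (file and binary names are placeholders):

     gcc -O2 -fprofile-generate foo.c -o foo
     ./foo                                  (writes foo.gcda on exit)
     gcc -O2 -fprofile-use foo.c -o foo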
87
   The value-profile transformations driver is the function
   gimple_value_profile_transformations().  It traverses all statements in
   the to-be-transformed function and looks for statements with one or
   more histograms attached to them.  If a statement has histograms, the
   transformation functions are called on the statement.
93
94   Limitations / FIXME / TODO:
95   * Only one histogram of each type can be associated with a statement.
96   * Some value profile transformations are done in builtins.cc (?!)
97   * Updating of histograms needs some TLC.
98   * The value profiling code could be used to record analysis results
99     from non-profiling (e.g. VRP).
100   * Adding new profilers should be simplified, starting with a cleanup
101     of what-happens-where and with making gimple_find_values_to_profile
102     and gimple_value_profile_transformations table-driven, perhaps...
103*/
104
105static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
106static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
107static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
108static bool gimple_stringops_transform (gimple_stmt_iterator *);
109static void dump_ic_profile (gimple_stmt_iterator *gsi);
110
111/* Allocate histogram value.  */
112
113histogram_value
114gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
115			      enum hist_type type, gimple *stmt, tree value)
116{
117   histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
118   hist->hvalue.value = value;
119   hist->hvalue.stmt = stmt;
120   hist->type = type;
121   return hist;
122}
123
124/* Hash value for histogram.  */
125
126static hashval_t
127histogram_hash (const void *x)
128{
129  return htab_hash_pointer (((const_histogram_value)x)->hvalue.stmt);
130}
131
132/* Return nonzero if statement for histogram_value X is Y.  */
133
134static int
135histogram_eq (const void *x, const void *y)
136{
137  return ((const_histogram_value) x)->hvalue.stmt == (const gimple *) y;
138}
139
140/* Set histogram for STMT.  */
141
142static void
143set_histogram_value (struct function *fun, gimple *stmt, histogram_value hist)
144{
145  void **loc;
146  if (!hist && !VALUE_HISTOGRAMS (fun))
147    return;
148  if (!VALUE_HISTOGRAMS (fun))
149    VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
150				           histogram_eq, NULL);
151  loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
152                                  htab_hash_pointer (stmt),
153				  hist ? INSERT : NO_INSERT);
154  if (!hist)
155    {
156      if (loc)
157	htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
158      return;
159    }
160  *loc = hist;
161}
162
163/* Get histogram list for STMT.  */
164
165histogram_value
166gimple_histogram_value (struct function *fun, gimple *stmt)
167{
168  if (!VALUE_HISTOGRAMS (fun))
169    return NULL;
170  return (histogram_value) htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
171						htab_hash_pointer (stmt));
172}
173
174/* Add histogram for STMT.  */
175
176void
177gimple_add_histogram_value (struct function *fun, gimple *stmt,
178			    histogram_value hist)
179{
180  hist->hvalue.next = gimple_histogram_value (fun, stmt);
181  set_histogram_value (fun, stmt, hist);
182  hist->fun = fun;
183}
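
/* For illustration, allocating a histogram and attaching it to a statement
   (this is essentially what stream_in_histogram_value below does for each
   streamed-in histogram) boils down to:

     histogram_value hist
       = gimple_alloc_histogram_value (cfun, HIST_TYPE_TOPN_VALUES,
				       stmt, value);
     gimple_add_histogram_value (cfun, stmt, hist);

   where VALUE is the profiled SSA name and the counters are filled in
   separately.  */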
184
185/* Remove histogram HIST from STMT's histogram list.  */
186
187void
188gimple_remove_histogram_value (struct function *fun, gimple *stmt,
189			       histogram_value hist)
190{
191  histogram_value hist2 = gimple_histogram_value (fun, stmt);
192  if (hist == hist2)
193    {
194      set_histogram_value (fun, stmt, hist->hvalue.next);
195    }
196  else
197    {
198      while (hist2->hvalue.next != hist)
199	hist2 = hist2->hvalue.next;
200      hist2->hvalue.next = hist->hvalue.next;
201    }
202  free (hist->hvalue.counters);
203  if (flag_checking)
204    memset (hist, 0xab, sizeof (*hist));
205  free (hist);
206}
207
/* Look up the histogram of type TYPE attached to STMT.  */
209
210histogram_value
211gimple_histogram_value_of_type (struct function *fun, gimple *stmt,
212				enum hist_type type)
213{
214  histogram_value hist;
215  for (hist = gimple_histogram_value (fun, stmt); hist;
216       hist = hist->hvalue.next)
217    if (hist->type == type)
218      return hist;
219  return NULL;
220}
221
222/* Dump information about HIST to DUMP_FILE.  */
223
224static void
225dump_histogram_value (FILE *dump_file, histogram_value hist)
226{
227  switch (hist->type)
228    {
229    case HIST_TYPE_INTERVAL:
230      if (hist->hvalue.counters)
231	{
232	  fprintf (dump_file, "Interval counter range [%d,%d]: [",
233		   hist->hdata.intvl.int_start,
234		   (hist->hdata.intvl.int_start
235		    + hist->hdata.intvl.steps - 1));
236
237	  unsigned int i;
238	  for (i = 0; i < hist->hdata.intvl.steps; i++)
239	    {
240	      fprintf (dump_file, "%d:%" PRId64,
241		       hist->hdata.intvl.int_start + i,
242		       (int64_t) hist->hvalue.counters[i]);
243	      if (i != hist->hdata.intvl.steps - 1)
244		fprintf (dump_file, ", ");
245	    }
246	  fprintf (dump_file, "] outside range: %" PRId64 ".\n",
247		   (int64_t) hist->hvalue.counters[i]);
248	}
249      break;
250
251    case HIST_TYPE_POW2:
252      if (hist->hvalue.counters)
253	fprintf (dump_file, "Pow2 counter pow2:%" PRId64
254		 " nonpow2:%" PRId64 ".\n",
255		 (int64_t) hist->hvalue.counters[1],
256		 (int64_t) hist->hvalue.counters[0]);
257      break;
258
259    case HIST_TYPE_TOPN_VALUES:
260    case HIST_TYPE_INDIR_CALL:
261      if (hist->hvalue.counters)
262	{
263	  fprintf (dump_file,
264		   (hist->type == HIST_TYPE_TOPN_VALUES
265		    ? "Top N value counter" : "Indirect call counter"));
266	  if (hist->hvalue.counters)
267	    {
268	      unsigned count = hist->hvalue.counters[1];
269	      fprintf (dump_file, " all: %" PRId64 ", %" PRId64 " values: ",
270		       (int64_t) hist->hvalue.counters[0], (int64_t) count);
271	      for (unsigned i = 0; i < count; i++)
272		{
273		  fprintf (dump_file, "[%" PRId64 ":%" PRId64 "]",
274			   (int64_t) hist->hvalue.counters[2 * i + 2],
275			   (int64_t) hist->hvalue.counters[2 * i + 3]);
276		  if (i != count - 1)
277		    fprintf (dump_file, ", ");
278		}
279	      fprintf (dump_file, ".\n");
280	    }
281	}
282      break;
283
284    case HIST_TYPE_AVERAGE:
285      if (hist->hvalue.counters)
286	fprintf (dump_file, "Average value sum:%" PRId64
287		 " times:%" PRId64 ".\n",
288		 (int64_t) hist->hvalue.counters[0],
289		 (int64_t) hist->hvalue.counters[1]);
290      break;
291
292    case HIST_TYPE_IOR:
293      if (hist->hvalue.counters)
294	fprintf (dump_file, "IOR value ior:%" PRId64 ".\n",
295		 (int64_t) hist->hvalue.counters[0]);
296      break;
297
298    case HIST_TYPE_TIME_PROFILE:
299      if (hist->hvalue.counters)
300	fprintf (dump_file, "Time profile time:%" PRId64 ".\n",
301		 (int64_t) hist->hvalue.counters[0]);
302      break;
303    default:
304      gcc_unreachable ();
305   }
306}
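
/* For instance, with the formats above an interval histogram might be
   dumped as

     Interval counter range [0,2]: [0:10, 1:20, 2:5] outside range: 3.

   and a top-N value histogram as

     Top N value counter all: 100, 2 values: [8:90], [3:10].

   (hypothetical counts).  */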
307
/* Stream out histogram HIST (and any histograms chained after it) to OB.  */
309
310void
311stream_out_histogram_value (struct output_block *ob, histogram_value hist)
312{
313  struct bitpack_d bp;
314  unsigned int i;
315
316  bp = bitpack_create (ob->main_stream);
317  bp_pack_enum (&bp, hist_type, HIST_TYPE_MAX, hist->type);
318  bp_pack_value (&bp, hist->hvalue.next != NULL, 1);
319  streamer_write_bitpack (&bp);
320  switch (hist->type)
321    {
322    case HIST_TYPE_INTERVAL:
323      streamer_write_hwi (ob, hist->hdata.intvl.int_start);
324      streamer_write_uhwi (ob, hist->hdata.intvl.steps);
325      break;
326    default:
327      break;
328    }
329  for (i = 0; i < hist->n_counters; i++)
330    {
      /* When the user uses an unsigned type with a big value, the constant
	 converted to gcov_type (a signed type) can be negative.  */
333      gcov_type value = hist->hvalue.counters[i];
334      if (hist->type == HIST_TYPE_TOPN_VALUES
335	  || hist->type == HIST_TYPE_IOR)
336	/* Note that the IOR counter tracks pointer values and these can have
337	   sign bit set.  */
338	;
339      else if (hist->type == HIST_TYPE_INDIR_CALL && i == 0)
340	/* 'all' counter overflow is stored as a negative value. Individual
341	   counters and values are expected to be non-negative.  */
342	;
343      else
344	gcc_assert (value >= 0);
345
346      streamer_write_gcov_count (ob, value);
347    }
348  if (hist->hvalue.next)
349    stream_out_histogram_value (ob, hist->hvalue.next);
350}
351
/* Stream in the histograms for STMT from IB and attach them to the
   statement.  */
353
354void
355stream_in_histogram_value (class lto_input_block *ib, gimple *stmt)
356{
357  enum hist_type type;
358  unsigned int ncounters = 0;
359  struct bitpack_d bp;
360  unsigned int i;
361  histogram_value new_val;
362  bool next;
363  histogram_value *next_p = NULL;
364
365  do
366    {
367      bp = streamer_read_bitpack (ib);
368      type = bp_unpack_enum (&bp, hist_type, HIST_TYPE_MAX);
369      next = bp_unpack_value (&bp, 1);
370      new_val = gimple_alloc_histogram_value (cfun, type, stmt);
371      switch (type)
372	{
373	case HIST_TYPE_INTERVAL:
374	  new_val->hdata.intvl.int_start = streamer_read_hwi (ib);
375	  new_val->hdata.intvl.steps = streamer_read_uhwi (ib);
376	  ncounters = new_val->hdata.intvl.steps + 2;
377	  break;
378
379	case HIST_TYPE_POW2:
380	case HIST_TYPE_AVERAGE:
381	  ncounters = 2;
382	  break;
383
384	case HIST_TYPE_TOPN_VALUES:
385	case HIST_TYPE_INDIR_CALL:
386	  break;
387
388	case HIST_TYPE_IOR:
389        case HIST_TYPE_TIME_PROFILE:
390	  ncounters = 1;
391	  break;
392
393	default:
394	  gcc_unreachable ();
395	}
396
      /* TOP N histograms have a variable number of counters.  */
398      if (type == HIST_TYPE_INDIR_CALL || type == HIST_TYPE_TOPN_VALUES)
399	{
400	  gcov_type total = streamer_read_gcov_count (ib);
401	  gcov_type ncounters = streamer_read_gcov_count (ib);
402	  new_val->hvalue.counters = XNEWVAR (gcov_type,
403					      sizeof (*new_val->hvalue.counters)
404					      * (2 + 2 * ncounters));
405	  new_val->hvalue.counters[0] = total;
406	  new_val->hvalue.counters[1] = ncounters;
407	  new_val->n_counters = 2 + 2 * ncounters;
408	  for (i = 0; i < 2 * ncounters; i++)
409	    new_val->hvalue.counters[2 + i] = streamer_read_gcov_count (ib);
410	}
411      else
412	{
413	  new_val->hvalue.counters = XNEWVAR (gcov_type,
414					      sizeof (*new_val->hvalue.counters)
415					      * ncounters);
416	  new_val->n_counters = ncounters;
417	  for (i = 0; i < ncounters; i++)
418	    new_val->hvalue.counters[i] = streamer_read_gcov_count (ib);
419	}
420
421      if (!next_p)
422	gimple_add_histogram_value (cfun, stmt, new_val);
423      else
424	*next_p = new_val;
425      next_p = &new_val->hvalue.next;
426    }
427  while (next);
428}
429
430/* Dump all histograms attached to STMT to DUMP_FILE.  */
431
432void
433dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple *stmt)
434{
435  histogram_value hist;
436  for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
437    dump_histogram_value (dump_file, hist);
438}
439
440/* Remove all histograms associated with STMT.  */
441
442void
443gimple_remove_stmt_histograms (struct function *fun, gimple *stmt)
444{
445  histogram_value val;
446  while ((val = gimple_histogram_value (fun, stmt)) != NULL)
447    gimple_remove_histogram_value (fun, stmt, val);
448}
449
/* Duplicate all histograms associated with OSTMT to STMT.  */
451
452void
453gimple_duplicate_stmt_histograms (struct function *fun, gimple *stmt,
454				  struct function *ofun, gimple *ostmt)
455{
456  histogram_value val;
457  for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
458    {
459      histogram_value new_val = gimple_alloc_histogram_value (fun, val->type);
460      memcpy (new_val, val, sizeof (*val));
461      new_val->hvalue.stmt = stmt;
462      new_val->hvalue.counters = XNEWVAR (gcov_type, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
463      memcpy (new_val->hvalue.counters, val->hvalue.counters, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
464      gimple_add_histogram_value (fun, stmt, new_val);
465    }
466}
467
468/* Move all histograms associated with OSTMT to STMT.  */
469
470void
471gimple_move_stmt_histograms (struct function *fun, gimple *stmt, gimple *ostmt)
472{
473  histogram_value val = gimple_histogram_value (fun, ostmt);
474  if (val)
475    {
      /* The following three statements can't be reordered,
	 because the histogram hash table relies on the stmt field value
	 for finding the exact slot.  */
479      set_histogram_value (fun, ostmt, NULL);
480      for (; val != NULL; val = val->hvalue.next)
481	val->hvalue.stmt = stmt;
482      set_histogram_value (fun, stmt, val);
483    }
484}
485
486static bool error_found = false;
487
/* Helper function for verify_histograms.  For each histogram reachable via
   the hash-table walk, verify that it was also reached via the statement
   walk.  */
490
491static int
492visit_hist (void **slot, void *data)
493{
494  hash_set<histogram_value> *visited = (hash_set<histogram_value> *) data;
495  histogram_value hist = *(histogram_value *) slot;
496
497  if (!visited->contains (hist)
498      && hist->type != HIST_TYPE_TIME_PROFILE)
499    {
500      error ("dead histogram");
501      dump_histogram_value (stderr, hist);
502      debug_gimple_stmt (hist->hvalue.stmt);
503      error_found = true;
504    }
505  return 1;
506}
507
508/* Verify sanity of the histograms.  */
509
510DEBUG_FUNCTION void
511verify_histograms (void)
512{
513  basic_block bb;
514  gimple_stmt_iterator gsi;
515  histogram_value hist;
516
517  error_found = false;
518  hash_set<histogram_value> visited_hists;
519  FOR_EACH_BB_FN (bb, cfun)
520    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
521      {
522	gimple *stmt = gsi_stmt (gsi);
523
524	for (hist = gimple_histogram_value (cfun, stmt); hist;
525	     hist = hist->hvalue.next)
526	  {
527	    if (hist->hvalue.stmt != stmt)
528	      {
529		error ("histogram value statement does not correspond to "
530		       "the statement it is associated with");
531		debug_gimple_stmt (stmt);
532		dump_histogram_value (stderr, hist);
533		error_found = true;
534	      }
535            visited_hists.add (hist);
536	  }
537      }
538  if (VALUE_HISTOGRAMS (cfun))
539    htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, &visited_hists);
540  if (error_found)
541    internal_error ("%qs failed", __func__);
542}
543
/* Helper function for free_histograms.  Free the counters and the
   histogram itself for each entry of the hash table.  */
546
547static int
548free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
549{
550  histogram_value hist = *(histogram_value *) slot;
551  free (hist->hvalue.counters);
552  free (hist);
553  return 1;
554}
555
556void
557free_histograms (struct function *fn)
558{
559  if (VALUE_HISTOGRAMS (fn))
560    {
561      htab_traverse (VALUE_HISTOGRAMS (fn), free_hist, NULL);
562      htab_delete (VALUE_HISTOGRAMS (fn));
563      VALUE_HISTOGRAMS (fn) = NULL;
564    }
565}
566
/* The overall number of invocations of the counter should match the
   execution count of the basic block.  Report a mismatch as an error
   rather than an internal error, as it might mean that the user has
   misused the profile somehow.  */
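
/* For instance (hypothetical numbers), if a histogram reports an overall
   count ALL of 90 while the basic block count is 100, then with
   -fprofile-correction ALL is adjusted to 100 (and COUNT clamped to it if
   needed) and false is returned; without -fprofile-correction a "corrupted
   value profile" error is emitted and true is returned.  */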
571
572static bool
573check_counter (gimple *stmt, const char * name,
574	       gcov_type *count, gcov_type *all, profile_count bb_count_d)
575{
576  gcov_type bb_count = bb_count_d.ipa ().to_gcov_type ();
577  if (*all != bb_count || *count > *all)
578    {
579      dump_user_location_t locus;
580      locus = ((stmt != NULL)
581	       ? dump_user_location_t (stmt)
582	       : dump_user_location_t::from_function_decl
583		   (current_function_decl));
584      if (flag_profile_correction)
585        {
586          if (dump_enabled_p ())
587            dump_printf_loc (MSG_MISSED_OPTIMIZATION, locus,
588                             "correcting inconsistent value profile: %s "
589                             "profiler overall count (%d) does not match BB "
590                             "count (%d)\n", name, (int)*all, (int)bb_count);
591	  *all = bb_count;
592	  if (*count > *all)
593            *count = *all;
594	  return false;
595	}
596      else
597	{
598	  error_at (locus.get_location_t (), "corrupted value profile: %s "
599		    "profile counter (%d out of %d) inconsistent with "
600		    "basic-block count (%d)",
601		    name,
602		    (int) *count,
603		    (int) *all,
604		    (int) bb_count);
605	  return true;
606	}
607    }
608
609  return false;
610}
611
612/* GIMPLE based transformations. */
613
614bool
615gimple_value_profile_transformations (void)
616{
617  basic_block bb;
618  gimple_stmt_iterator gsi;
619  bool changed = false;
620
621  FOR_EACH_BB_FN (bb, cfun)
622    {
623      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
624	{
625	  gimple *stmt = gsi_stmt (gsi);
626	  histogram_value th = gimple_histogram_value (cfun, stmt);
627	  if (!th)
628	    continue;
629
630	  if (dump_file)
631	    {
632	      fprintf (dump_file, "Trying transformations on stmt ");
633	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
634	      dump_histograms_for_stmt (cfun, dump_file, stmt);
635	    }
636
637	  /* Transformations:  */
638	  /* The order of things in this conditional controls which
639	     transformation is used when more than one is applicable.  */
640	  /* It is expected that any code added by the transformations
641	     will be added before the current statement, and that the
642	     current statement remain valid (although possibly
643	     modified) upon return.  */
644	  if (gimple_mod_subtract_transform (&gsi)
645	      || gimple_divmod_fixed_value_transform (&gsi)
646	      || gimple_mod_pow2_value_transform (&gsi)
647	      || gimple_stringops_transform (&gsi))
648	    {
649	      stmt = gsi_stmt (gsi);
650	      changed = true;
651	      /* Original statement may no longer be in the same block. */
652	      if (bb != gimple_bb (stmt))
653		{
654	          bb = gimple_bb (stmt);
655		  gsi = gsi_for_stmt (stmt);
656		}
657	    }
658
	  /* dump_ic_profile (below) never transforms the statement; it only
	     dumps information about the indirect call profile.  */
660	  if (dump_enabled_p ())
661	    dump_ic_profile (&gsi);
662        }
663    }
664
665  return changed;
666}
667
668/* Generate code for transformation 1 (with parent gimple assignment
669   STMT and probability of taking the optimal path PROB, which is
670   equivalent to COUNT/ALL within roundoff error).  This generates the
671   result into a temp and returns the temp; it does not replace or
672   alter the original STMT.  */
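
/* As a sketch (names follow the code below), for "lhs = op1 OP op2" with
   most common divisor VALUE the emitted structure is roughly:

     bb:   tmp0 = (optype) VALUE;
	   tmp1 = op2;
	   if (tmp1 != tmp0) goto bb3; else goto bb2;
     bb2:  tmp2 = op1 OP tmp0;	(count COUNT, reached with probability PROB)
	   goto bb4;
     bb3:  tmp2 = op1 OP op2;	(count ALL - COUNT)
     bb4:  the original statement, later rewritten by the caller
	   to use tmp2.  */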
673
674static tree
675gimple_divmod_fixed_value (gassign *stmt, tree value, profile_probability prob,
676			   gcov_type count, gcov_type all)
677{
678  gassign *stmt1, *stmt2;
679  gcond *stmt3;
680  tree tmp0, tmp1, tmp2;
681  gimple *bb1end, *bb2end, *bb3end;
682  basic_block bb, bb2, bb3, bb4;
683  tree optype, op1, op2;
684  edge e12, e13, e23, e24, e34;
685  gimple_stmt_iterator gsi;
686
687  gcc_assert (is_gimple_assign (stmt)
688	      && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
689		  || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));
690
691  optype = TREE_TYPE (gimple_assign_lhs (stmt));
692  op1 = gimple_assign_rhs1 (stmt);
693  op2 = gimple_assign_rhs2 (stmt);
694
695  bb = gimple_bb (stmt);
696  gsi = gsi_for_stmt (stmt);
697
698  tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
699  tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
700  stmt1 = gimple_build_assign (tmp0, fold_convert (optype, value));
701  stmt2 = gimple_build_assign (tmp1, op2);
702  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
703  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
704  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
705  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
706  bb1end = stmt3;
707
708  tmp2 = create_tmp_reg (optype, "PROF");
709  stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, tmp0);
710  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
711  bb2end = stmt1;
712
713  stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, op2);
714  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
715  bb3end = stmt1;
716
717  /* Fix CFG. */
718  /* Edge e23 connects bb2 to bb3, etc. */
719  e12 = split_block (bb, bb1end);
720  bb2 = e12->dest;
721  bb2->count = profile_count::from_gcov_type (count);
722  e23 = split_block (bb2, bb2end);
723  bb3 = e23->dest;
724  bb3->count = profile_count::from_gcov_type (all - count);
725  e34 = split_block (bb3, bb3end);
726  bb4 = e34->dest;
727  bb4->count = profile_count::from_gcov_type (all);
728
729  e12->flags &= ~EDGE_FALLTHRU;
730  e12->flags |= EDGE_FALSE_VALUE;
731  e12->probability = prob;
732
733  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
734  e13->probability = prob.invert ();
735
736  remove_edge (e23);
737
738  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
739  e24->probability = profile_probability::always ();
740
741  e34->probability = profile_probability::always ();
742
743  return tmp2;
744}
745
/* Return the N-th value/count pair of the TOPN_VALUES histogram HIST.  If
   such a pair exists, return true and set the VALUE, COUNT and ALL
   arguments.

   The counters have the following meaning:

   abs (counters[0]) is the number of executions
   counters[1] is the number of tracked values
   for i in 0 ... TOPN-1
     counters[2 * i + 2] is the target value
     counters[2 * i + 3] is the corresponding hit-rate counter.

   counters[0] is negative when the counter became full during merging
   and some values were lost.  */
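
/* For instance (hypothetical numbers), a top-N histogram that observed the
   value 8 ninety times and the value 3 ten times over 100 executions is
   laid out as

     counters[] = { 100, 2, 8, 90, 3, 10 }

   i.e. total executions, number of tracked values, then (value, count)
   pairs.  */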
759
760bool
761get_nth_most_common_value (gimple *stmt, const char *counter_type,
762			   histogram_value hist, gcov_type *value,
763			   gcov_type *count, gcov_type *all, unsigned n)
764{
765  unsigned counters = hist->hvalue.counters[1];
766  if (n >= counters)
767    return false;
768
769  *count = 0;
770  *value = 0;
771
772  gcov_type read_all = abs_hwi (hist->hvalue.counters[0]);
773  gcov_type covered = 0;
774  for (unsigned i = 0; i < counters; ++i)
775    covered += hist->hvalue.counters[2 * i + 3];
776
777  gcov_type v = hist->hvalue.counters[2 * n + 2];
778  gcov_type c = hist->hvalue.counters[2 * n + 3];
779
780  if (hist->hvalue.counters[0] < 0
781      && flag_profile_reproducible == PROFILE_REPRODUCIBILITY_PARALLEL_RUNS)
782    {
783      if (dump_file)
784	fprintf (dump_file, "Histogram value dropped in '%s' mode\n",
785		 "-fprofile-reproducible=parallel-runs");
786      return false;
787    }
788  else if (covered != read_all
789	   && flag_profile_reproducible == PROFILE_REPRODUCIBILITY_MULTITHREADED)
790    {
791      if (dump_file)
792	fprintf (dump_file, "Histogram value dropped in '%s' mode\n",
793		 "-fprofile-reproducible=multithreaded");
794      return false;
795    }
796
797  /* Indirect calls can't be verified.  */
798  if (stmt
799      && check_counter (stmt, counter_type, &c, &read_all,
800			gimple_bb (stmt)->count))
801    return false;
802
803  *all = read_all;
804
805  *value = v;
806  *count = c;
807  return true;
808}
809
/* Do transform 1) on the statement pointed to by SI if applicable.  */
811
812static bool
813gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
814{
815  histogram_value histogram;
816  enum tree_code code;
817  gcov_type val, count, all;
818  tree result, value, tree_val;
819  profile_probability prob;
820  gassign *stmt;
821
822  stmt = dyn_cast <gassign *> (gsi_stmt (*si));
823  if (!stmt)
824    return false;
825
826  if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
827    return false;
828
829  code = gimple_assign_rhs_code (stmt);
830
831  if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
832    return false;
833
834  histogram = gimple_histogram_value_of_type (cfun, stmt,
835					      HIST_TYPE_TOPN_VALUES);
836  if (!histogram)
837    return false;
838
839  if (!get_nth_most_common_value (stmt, "divmod", histogram, &val, &count,
840				  &all))
841    return false;
842
843  value = histogram->hvalue.value;
844  gimple_remove_histogram_value (cfun, stmt, histogram);
845
846  /* We require that count is at least half of all.  */
847  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
848      || 2 * count < all
849      || optimize_bb_for_size_p (gimple_bb (stmt)))
850    return false;
851
852  /* Compute probability of taking the optimal path.  */
853  if (all > 0)
854    prob = profile_probability::probability_in_gcov_type (count, all);
855  else
856    prob = profile_probability::never ();
857
858  if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
859    tree_val = build_int_cst (get_gcov_type (), val);
860  else
861    {
862      HOST_WIDE_INT a[2];
863      a[0] = (unsigned HOST_WIDE_INT) val;
864      a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
865
866      tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
867	TYPE_PRECISION (get_gcov_type ()), false));
868    }
869  result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);
870
871  if (dump_enabled_p ())
872    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
873		     "Transformation done: div/mod by constant %T\n", tree_val);
874
875  gimple_assign_set_rhs_from_tree (si, result);
876  update_stmt (gsi_stmt (*si));
877
878  return true;
879}
880
881/* Generate code for transformation 2 (with parent gimple assign STMT and
882   probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
883   within roundoff error).  This generates the result into a temp and returns
884   the temp; it does not replace or alter the original STMT.  */
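
/* As a source-level sketch (names follow the code below), the generated
   sequence for "result = op1 % op2" is roughly:

     tmp2 = op2 - 1;
     tmp3 = tmp2 & op2;
     if (tmp3 != 0)
       result = op1 % op2;	(op2 is not a power of two)
     else
       result = op1 & tmp2;	(op2 is a power of two, so the modulo
				 reduces to a bit mask)  */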
885
886static tree
887gimple_mod_pow2 (gassign *stmt, profile_probability prob, gcov_type count, gcov_type all)
888{
889  gassign *stmt1, *stmt2, *stmt3;
890  gcond *stmt4;
891  tree tmp2, tmp3;
892  gimple *bb1end, *bb2end, *bb3end;
893  basic_block bb, bb2, bb3, bb4;
894  tree optype, op1, op2;
895  edge e12, e13, e23, e24, e34;
896  gimple_stmt_iterator gsi;
897  tree result;
898
899  gcc_assert (is_gimple_assign (stmt)
900	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
901
902  optype = TREE_TYPE (gimple_assign_lhs (stmt));
903  op1 = gimple_assign_rhs1 (stmt);
904  op2 = gimple_assign_rhs2 (stmt);
905
906  bb = gimple_bb (stmt);
907  gsi = gsi_for_stmt (stmt);
908
909  result = create_tmp_reg (optype, "PROF");
910  tmp2 = make_temp_ssa_name (optype, NULL, "PROF");
911  tmp3 = make_temp_ssa_name (optype, NULL, "PROF");
912  stmt2 = gimple_build_assign (tmp2, PLUS_EXPR, op2,
913			       build_int_cst (optype, -1));
914  stmt3 = gimple_build_assign (tmp3, BIT_AND_EXPR, tmp2, op2);
915  stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
916			     NULL_TREE, NULL_TREE);
917  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
918  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
919  gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
920  bb1end = stmt4;
921
922  /* tmp2 == op2-1 inherited from previous block.  */
923  stmt1 = gimple_build_assign (result, BIT_AND_EXPR, op1, tmp2);
924  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
925  bb2end = stmt1;
926
927  stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
928			       op1, op2);
929  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
930  bb3end = stmt1;
931
932  /* Fix CFG. */
933  /* Edge e23 connects bb2 to bb3, etc. */
934  e12 = split_block (bb, bb1end);
935  bb2 = e12->dest;
936  bb2->count = profile_count::from_gcov_type (count);
937  e23 = split_block (bb2, bb2end);
938  bb3 = e23->dest;
939  bb3->count = profile_count::from_gcov_type (all - count);
940  e34 = split_block (bb3, bb3end);
941  bb4 = e34->dest;
942  bb4->count = profile_count::from_gcov_type (all);
943
944  e12->flags &= ~EDGE_FALLTHRU;
945  e12->flags |= EDGE_FALSE_VALUE;
946  e12->probability = prob;
947
948  e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
949  e13->probability = prob.invert ();
950
951  remove_edge (e23);
952
953  e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
954  e24->probability = profile_probability::always ();
955
956  e34->probability = profile_probability::always ();
957
958  return result;
959}
960
/* Do transform 2) on the statement pointed to by SI if applicable.  */
962
963static bool
964gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
965{
966  histogram_value histogram;
967  enum tree_code code;
968  gcov_type count, wrong_values, all;
969  tree lhs_type, result, value;
970  profile_probability prob;
971  gassign *stmt;
972
973  stmt = dyn_cast <gassign *> (gsi_stmt (*si));
974  if (!stmt)
975    return false;
976
977  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
978  if (!INTEGRAL_TYPE_P (lhs_type))
979    return false;
980
981  code = gimple_assign_rhs_code (stmt);
982
983  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
984    return false;
985
986  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
987  if (!histogram)
988    return false;
989
990  value = histogram->hvalue.value;
991  wrong_values = histogram->hvalue.counters[0];
992  count = histogram->hvalue.counters[1];
993
994  gimple_remove_histogram_value (cfun, stmt, histogram);
995
  /* We require that we hit a power of 2 in at least half of all
     evaluations.  */
997  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
998      || count < wrong_values
999      || optimize_bb_for_size_p (gimple_bb (stmt)))
1000    return false;
1001
1002  /* Compute probability of taking the optimal path.  */
1003  all = count + wrong_values;
1004
1005  if (check_counter (stmt, "pow2", &count, &all, gimple_bb (stmt)->count))
1006    return false;
1007
1008  if (dump_enabled_p ())
1009    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1010		     "Transformation done: mod power of 2\n");
1011
1012  if (all > 0)
1013    prob = profile_probability::probability_in_gcov_type (count, all);
1014  else
1015    prob = profile_probability::never ();
1016
1017  result = gimple_mod_pow2 (stmt, prob, count, all);
1018
1019  gimple_assign_set_rhs_from_tree (si, result);
1020  update_stmt (gsi_stmt (*si));
1021
1022  return true;
1023}
1024
/* Generate code for transformations 3 and 4 (with parent gimple assign STMT
   and NCOUNTS the number of cases to support; currently only NCOUNTS == 0
   or 1 is supported, and this is built into this interface).  The
   probabilities of taking the optimal paths are PROB1 and PROB2, which are
   equivalent to COUNT1/ALL and COUNT2/ALL respectively within roundoff
   error.  This generates the result into a temp and returns the temp; it
   does not replace or alter the original STMT.  */
1032/* FIXME: Generalize the interface to handle NCOUNTS > 1.  */
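
/* As a source-level sketch (for NCOUNTS == 1; names follow the code below),
   the generated sequence for "result = op1 % op2" is roughly:

     result = op1;
     tmp1 = op2;
     if (result < tmp1)
       goto done;		(no subtraction needed)
     result = result - tmp1;
     if (result < tmp1)
       goto done;		(one subtraction was enough)
     result = result % tmp1;	(generic fallback)
   done:  */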
1033
1034static tree
1035gimple_mod_subtract (gassign *stmt, profile_probability prob1,
1036		     profile_probability prob2, int ncounts,
1037		     gcov_type count1, gcov_type count2, gcov_type all)
1038{
1039  gassign *stmt1;
1040  gimple *stmt2;
1041  gcond *stmt3;
1042  tree tmp1;
1043  gimple *bb1end, *bb2end = NULL, *bb3end;
1044  basic_block bb, bb2, bb3, bb4;
1045  tree optype, op1, op2;
1046  edge e12, e23 = 0, e24, e34, e14;
1047  gimple_stmt_iterator gsi;
1048  tree result;
1049
1050  gcc_assert (is_gimple_assign (stmt)
1051	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
1052
1053  optype = TREE_TYPE (gimple_assign_lhs (stmt));
1054  op1 = gimple_assign_rhs1 (stmt);
1055  op2 = gimple_assign_rhs2 (stmt);
1056
1057  bb = gimple_bb (stmt);
1058  gsi = gsi_for_stmt (stmt);
1059
1060  result = create_tmp_reg (optype, "PROF");
1061  tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1062  stmt1 = gimple_build_assign (result, op1);
1063  stmt2 = gimple_build_assign (tmp1, op2);
1064  stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1065  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1066  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1067  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
1068  bb1end = stmt3;
1069
1070  if (ncounts)	/* Assumed to be 0 or 1 */
1071    {
1072      stmt1 = gimple_build_assign (result, MINUS_EXPR, result, tmp1);
1073      stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1074      gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1075      gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1076      bb2end = stmt2;
1077    }
1078
1079  /* Fallback case. */
1080  stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
1081			       result, tmp1);
1082  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1083  bb3end = stmt1;
1084
1085  /* Fix CFG. */
1086  /* Edge e23 connects bb2 to bb3, etc. */
1087  /* However block 3 is optional; if it is not there, references
1088     to 3 really refer to block 2. */
1089  e12 = split_block (bb, bb1end);
1090  bb2 = e12->dest;
1091  bb2->count = profile_count::from_gcov_type (all - count1);
1092
1093  if (ncounts)	/* Assumed to be 0 or 1.  */
1094    {
1095      e23 = split_block (bb2, bb2end);
1096      bb3 = e23->dest;
1097      bb3->count = profile_count::from_gcov_type (all - count1 - count2);
1098    }
1099
1100  e34 = split_block (ncounts ? bb3 : bb2, bb3end);
1101  bb4 = e34->dest;
1102  bb4->count = profile_count::from_gcov_type (all);
1103
1104  e12->flags &= ~EDGE_FALLTHRU;
1105  e12->flags |= EDGE_FALSE_VALUE;
1106  e12->probability = prob1.invert ();
1107
1108  e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
1109  e14->probability = prob1;
1110
1111  if (ncounts)  /* Assumed to be 0 or 1.  */
1112    {
1113      e23->flags &= ~EDGE_FALLTHRU;
1114      e23->flags |= EDGE_FALSE_VALUE;
1115      e23->probability = prob2.invert ();
1116
1117      e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
1118      e24->probability = prob2;
1119    }
1120
1121  e34->probability = profile_probability::always ();
1122
1123  return result;
1124}
1125
1126/* Do transforms 3) and 4) on the statement pointed-to by SI if applicable.  */
1127
1128static bool
1129gimple_mod_subtract_transform (gimple_stmt_iterator *si)
1130{
1131  histogram_value histogram;
1132  enum tree_code code;
1133  gcov_type count, wrong_values, all;
1134  tree lhs_type, result;
1135  profile_probability prob1, prob2;
1136  unsigned int i, steps;
1137  gcov_type count1, count2;
1138  gassign *stmt;
1139  stmt = dyn_cast <gassign *> (gsi_stmt (*si));
1140  if (!stmt)
1141    return false;
1142
1143  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1144  if (!INTEGRAL_TYPE_P (lhs_type))
1145    return false;
1146
1147  code = gimple_assign_rhs_code (stmt);
1148
1149  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
1150    return false;
1151
1152  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
1153  if (!histogram)
1154    return false;
1155
1156  all = 0;
1157  wrong_values = 0;
1158  for (i = 0; i < histogram->hdata.intvl.steps; i++)
1159    all += histogram->hvalue.counters[i];
1160
1161  wrong_values += histogram->hvalue.counters[i];
1162  wrong_values += histogram->hvalue.counters[i+1];
1163  steps = histogram->hdata.intvl.steps;
1164  all += wrong_values;
1165  count1 = histogram->hvalue.counters[0];
1166  count2 = histogram->hvalue.counters[1];
1167
1168  if (check_counter (stmt, "interval", &count1, &all, gimple_bb (stmt)->count))
1169    {
1170      gimple_remove_histogram_value (cfun, stmt, histogram);
1171      return false;
1172    }
1173
1174  if (flag_profile_correction && count1 + count2 > all)
1175      all = count1 + count2;
1176
1177  gcc_assert (count1 + count2 <= all);
1178
1179  /* We require that we use just subtractions in at least 50% of all
1180     evaluations.  */
1181  count = 0;
1182  for (i = 0; i < histogram->hdata.intvl.steps; i++)
1183    {
1184      count += histogram->hvalue.counters[i];
1185      if (count * 2 >= all)
1186	break;
1187    }
1188  if (i == steps
1189      || optimize_bb_for_size_p (gimple_bb (stmt)))
1190    return false;
1191
1192  gimple_remove_histogram_value (cfun, stmt, histogram);
1193  if (dump_enabled_p ())
1194    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1195		     "Transformation done: mod subtract\n");
1196
1197  /* Compute probability of taking the optimal path(s).  */
1198  if (all > 0)
1199    {
1200      prob1 = profile_probability::probability_in_gcov_type (count1, all);
1201      prob2 = profile_probability::probability_in_gcov_type (count2, all);
1202    }
1203  else
1204    {
1205      prob1 = prob2 = profile_probability::never ();
1206    }
1207
1208  /* In practice, "steps" is always 2.  This interface reflects this,
1209     and will need to be changed if "steps" can change.  */
1210  result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);
1211
1212  gimple_assign_set_rhs_from_tree (si, result);
1213  update_stmt (gsi_stmt (*si));
1214
1215  return true;
1216}
1217
1218typedef int_hash <unsigned int, 0, UINT_MAX> profile_id_hash;
1219
1220static hash_map<profile_id_hash, cgraph_node *> *cgraph_node_map = 0;
1221
/* Return true if the cgraph node map is initialized.  This is used to test
   whether profile_ids have been created for cgraph nodes.  */
1225
1226bool
1227coverage_node_map_initialized_p (void)
1228{
1229  return cgraph_node_map != 0;
1230}
1231
/* Initialize the map from PROFILE_ID to CGRAPH_NODE.
   When LOCAL is true, the PROFILE_IDs are computed; when it is false we
   assume that the PROFILE_IDs were already assigned.  */
1235
1236void
1237init_node_map (bool local)
1238{
1239  struct cgraph_node *n;
1240  cgraph_node_map = new hash_map<profile_id_hash, cgraph_node *>;
1241
1242  FOR_EACH_DEFINED_FUNCTION (n)
1243    if (n->has_gimple_body_p () || n->thunk)
1244      {
1245	cgraph_node **val;
1246	dump_user_location_t loc
1247	  = dump_user_location_t::from_function_decl (n->decl);
1248	if (local)
1249	  {
1250	    n->profile_id = coverage_compute_profile_id (n);
1251	    while ((val = cgraph_node_map->get (n->profile_id))
1252		   || !n->profile_id)
1253	      {
1254		if (dump_enabled_p ())
1255		  dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1256				   "Local profile-id %i conflict"
1257				   " with nodes %s %s\n",
1258				   n->profile_id,
1259				   n->dump_name (),
1260				   (*val)->dump_name ());
1261		n->profile_id = (n->profile_id + 1) & 0x7fffffff;
1262	      }
1263	  }
1264	else if (!n->profile_id)
1265	  {
1266	    if (dump_enabled_p ())
1267	      dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1268			       "Node %s has no profile-id"
1269			       " (profile feedback missing?)\n",
1270			       n->dump_name ());
1271	    continue;
1272	  }
1273	else if ((val = cgraph_node_map->get (n->profile_id)))
1274	  {
1275	    if (dump_enabled_p ())
1276	      dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1277			       "Node %s has IP profile-id %i conflict. "
1278			       "Giving up.\n",
1279			       n->dump_name (), n->profile_id);
1280	    *val = NULL;
1281	    continue;
1282	  }
1283	cgraph_node_map->put (n->profile_id, n);
1284      }
1285}
1286
1287/* Delete the CGRAPH_NODE_MAP.  */
1288
1289void
1290del_node_map (void)
1291{
1292  delete cgraph_node_map;
1293}
1294
/* Return the cgraph node for the function with profile id PROFILE_ID.  */
1296
1297struct cgraph_node*
1298find_func_by_profile_id (int profile_id)
1299{
1300  cgraph_node **val = cgraph_node_map->get (profile_id);
1301  if (val)
1302    return *val;
1303  else
1304    return NULL;
1305}
1306
1307/* Do transformation
1308
1309  if (actual_callee_address == address_of_most_common_function/method)
1310    do direct call
1311  else
1312    old call
1313 */
1314
1315gcall *
1316gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
1317	   profile_probability prob)
1318{
1319  gcall *dcall_stmt;
1320  gassign *load_stmt;
1321  gcond *cond_stmt;
1322  tree tmp0, tmp1, tmp;
1323  basic_block cond_bb, dcall_bb, icall_bb, join_bb = NULL;
1324  edge e_cd, e_ci, e_di, e_dj = NULL, e_ij;
1325  gimple_stmt_iterator gsi;
1326  int lp_nr, dflags;
1327  edge e_eh, e;
1328  edge_iterator ei;
1329
1330  cond_bb = gimple_bb (icall_stmt);
1331  gsi = gsi_for_stmt (icall_stmt);
1332
1333  tmp0 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1334  tmp1 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1335  tmp = unshare_expr (gimple_call_fn (icall_stmt));
1336  load_stmt = gimple_build_assign (tmp0, tmp);
1337  gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1338
1339  tmp = fold_convert (ptr_type_node, build_addr (direct_call->decl));
1340  load_stmt = gimple_build_assign (tmp1, tmp);
1341  gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1342
1343  cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1344  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1345
1346  if (TREE_CODE (gimple_vdef (icall_stmt)) == SSA_NAME)
1347    {
1348      unlink_stmt_vdef (icall_stmt);
1349      release_ssa_name (gimple_vdef (icall_stmt));
1350    }
1351  gimple_set_vdef (icall_stmt, NULL_TREE);
1352  gimple_set_vuse (icall_stmt, NULL_TREE);
1353  update_stmt (icall_stmt);
1354  dcall_stmt = as_a <gcall *> (gimple_copy (icall_stmt));
1355  gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
1356  dflags = flags_from_decl_or_type (direct_call->decl);
1357  if ((dflags & ECF_NORETURN) != 0
1358      && should_remove_lhs_p (gimple_call_lhs (dcall_stmt)))
1359    gimple_call_set_lhs (dcall_stmt, NULL_TREE);
1360  gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);
1361
1362  /* Fix CFG. */
1363  /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
1364  e_cd = split_block (cond_bb, cond_stmt);
1365  dcall_bb = e_cd->dest;
1366  dcall_bb->count = cond_bb->count.apply_probability (prob);
1367
1368  e_di = split_block (dcall_bb, dcall_stmt);
1369  icall_bb = e_di->dest;
1370  icall_bb->count = cond_bb->count - dcall_bb->count;
1371
1372  /* Do not disturb existing EH edges from the indirect call.  */
1373  if (!stmt_ends_bb_p (icall_stmt))
1374    e_ij = split_block (icall_bb, icall_stmt);
1375  else
1376    {
1377      e_ij = find_fallthru_edge (icall_bb->succs);
1378      /* The indirect call might be noreturn.  */
1379      if (e_ij != NULL)
1380	{
1381	  e_ij->probability = profile_probability::always ();
1382	  e_ij = single_pred_edge (split_edge (e_ij));
1383	}
1384    }
1385  if (e_ij != NULL)
1386    {
1387      join_bb = e_ij->dest;
1388      join_bb->count = cond_bb->count;
1389    }
1390
1391  e_cd->flags = (e_cd->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1392  e_cd->probability = prob;
1393
1394  e_ci = make_edge (cond_bb, icall_bb, EDGE_FALSE_VALUE);
1395  e_ci->probability = prob.invert ();
1396
1397  remove_edge (e_di);
1398
1399  if (e_ij != NULL)
1400    {
1401      if ((dflags & ECF_NORETURN) == 0)
1402	{
1403	  e_dj = make_edge (dcall_bb, join_bb, EDGE_FALLTHRU);
1404	  e_dj->probability = profile_probability::always ();
1405	}
1406      e_ij->probability = profile_probability::always ();
1407    }
1408
1409  /* Insert PHI node for the call result if necessary.  */
1410  if (gimple_call_lhs (icall_stmt)
1411      && TREE_CODE (gimple_call_lhs (icall_stmt)) == SSA_NAME
1412      && (dflags & ECF_NORETURN) == 0)
1413    {
1414      tree result = gimple_call_lhs (icall_stmt);
1415      gphi *phi = create_phi_node (result, join_bb);
1416      gimple_call_set_lhs (icall_stmt,
1417			   duplicate_ssa_name (result, icall_stmt));
1418      add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1419      gimple_call_set_lhs (dcall_stmt,
1420			   duplicate_ssa_name (result, dcall_stmt));
1421      add_phi_arg (phi, gimple_call_lhs (dcall_stmt), e_dj, UNKNOWN_LOCATION);
1422    }
1423
1424  /* Build an EH edge for the direct call if necessary.  */
1425  lp_nr = lookup_stmt_eh_lp (icall_stmt);
1426  if (lp_nr > 0 && stmt_could_throw_p (cfun, dcall_stmt))
1427    {
1428      add_stmt_to_eh_lp (dcall_stmt, lp_nr);
1429    }
1430
1431  FOR_EACH_EDGE (e_eh, ei, icall_bb->succs)
1432    if (e_eh->flags & (EDGE_EH | EDGE_ABNORMAL))
1433      {
1434	e = make_edge (dcall_bb, e_eh->dest, e_eh->flags);
1435	e->probability = e_eh->probability;
1436	for (gphi_iterator psi = gsi_start_phis (e_eh->dest);
1437	     !gsi_end_p (psi); gsi_next (&psi))
1438	  {
1439	    gphi *phi = psi.phi ();
1440	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1441		     PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
1442	  }
1443       }
1444  if (!stmt_could_throw_p (cfun, dcall_stmt))
1445    gimple_purge_dead_eh_edges (dcall_bb);
1446  return dcall_stmt;
1447}
1448
1449/* Dump info about indirect call profile.  */
1450
1451static void
1452dump_ic_profile (gimple_stmt_iterator *gsi)
1453{
1454  gcall *stmt;
1455  histogram_value histogram;
1456  gcov_type val, count, all;
1457  struct cgraph_node *direct_call;
1458
1459  stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1460  if (!stmt)
1461    return;
1462
1463  if (gimple_call_fndecl (stmt) != NULL_TREE)
1464    return;
1465
1466  if (gimple_call_internal_p (stmt))
1467    return;
1468
1469  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
1470  if (!histogram)
1471    return;
1472
1473  count = 0;
1474  all = histogram->hvalue.counters[0];
1475
1476  for (unsigned j = 0; j < GCOV_TOPN_MAXIMUM_TRACKED_VALUES; j++)
1477    {
1478      if (!get_nth_most_common_value (NULL, "indirect call", histogram, &val,
1479				      &count, &all, j))
1480	return;
1481      if (!count)
1482	continue;
1483
1484      direct_call = find_func_by_profile_id ((int) val);
1485
1486      if (direct_call == NULL)
1487	dump_printf_loc (
1488	  MSG_MISSED_OPTIMIZATION, stmt,
1489	  "Indirect call -> direct call from other "
1490	  "module %T=> %i (will resolve by ipa-profile only with LTO)\n",
1491	  gimple_call_fn (stmt), (int) val);
1492      else
1493	dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1494			 "Indirect call -> direct call "
1495			 "%T => %T (will resolve by ipa-profile)\n",
1496			 gimple_call_fn (stmt), direct_call->decl);
1497      dump_printf_loc (MSG_NOTE, stmt,
1498		       "hist->count %" PRId64 " hist->all %" PRId64 "\n",
1499		       count, all);
1500    }
1501}
1502
/* Return true if the stringop CALL shall be profiled.  SIZE_ARG is set to
   the index of the argument holding the size of the string operation.  */
1505
1506static bool
1507interesting_stringop_to_profile_p (gcall *call, int *size_arg)
1508{
1509  enum built_in_function fcode;
1510
1511  fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (call));
1512  switch (fcode)
1513    {
1514     case BUILT_IN_MEMCPY:
1515     case BUILT_IN_MEMPCPY:
1516     case BUILT_IN_MEMMOVE:
1517       *size_arg = 2;
1518       return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
1519				       INTEGER_TYPE, VOID_TYPE);
1520     case BUILT_IN_MEMSET:
1521       *size_arg = 2;
1522       return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1523				      INTEGER_TYPE, VOID_TYPE);
1524     case BUILT_IN_BZERO:
1525       *size_arg = 1;
1526       return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1527				       VOID_TYPE);
1528     default:
1529       return false;
1530    }
1531}
1532
1533/* Convert stringop (..., vcall_size)
1534   into
1535   if (vcall_size == icall_size)
1536     stringop (..., icall_size);
1537   else
1538     stringop (..., vcall_size);
1539   assuming we'll propagate a true constant into ICALL_SIZE later.  */
1540
1541static void
1542gimple_stringop_fixed_value (gcall *vcall_stmt, tree icall_size, profile_probability prob,
1543			     gcov_type count, gcov_type all)
1544{
1545  gassign *tmp_stmt;
1546  gcond *cond_stmt;
1547  gcall *icall_stmt;
1548  tree tmp0, tmp1, vcall_size, optype;
1549  basic_block cond_bb, icall_bb, vcall_bb, join_bb;
1550  edge e_ci, e_cv, e_iv, e_ij, e_vj;
1551  gimple_stmt_iterator gsi;
1552  int size_arg;
1553
1554  if (!interesting_stringop_to_profile_p (vcall_stmt, &size_arg))
1555    gcc_unreachable ();
1556
1557  cond_bb = gimple_bb (vcall_stmt);
1558  gsi = gsi_for_stmt (vcall_stmt);
1559
1560  vcall_size = gimple_call_arg (vcall_stmt, size_arg);
1561  optype = TREE_TYPE (vcall_size);
1562
1563  tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
1564  tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1565  tmp_stmt = gimple_build_assign (tmp0, fold_convert (optype, icall_size));
1566  gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1567
1568  tmp_stmt = gimple_build_assign (tmp1, vcall_size);
1569  gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1570
1571  cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1572  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1573
1574  if (TREE_CODE (gimple_vdef (vcall_stmt)) == SSA_NAME)
1575    {
1576      unlink_stmt_vdef (vcall_stmt);
1577      release_ssa_name (gimple_vdef (vcall_stmt));
1578    }
1579  gimple_set_vdef (vcall_stmt, NULL);
1580  gimple_set_vuse (vcall_stmt, NULL);
1581  update_stmt (vcall_stmt);
1582  icall_stmt = as_a <gcall *> (gimple_copy (vcall_stmt));
1583  gimple_call_set_arg (icall_stmt, size_arg,
1584		       fold_convert (optype, icall_size));
1585  gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);
1586
1587  /* Fix CFG. */
1588  /* Edge e_ci connects cond_bb to icall_bb, etc. */
1589  e_ci = split_block (cond_bb, cond_stmt);
1590  icall_bb = e_ci->dest;
1591  icall_bb->count = profile_count::from_gcov_type (count);
1592
1593  e_iv = split_block (icall_bb, icall_stmt);
1594  vcall_bb = e_iv->dest;
1595  vcall_bb->count = profile_count::from_gcov_type (all - count);
1596
1597  e_vj = split_block (vcall_bb, vcall_stmt);
1598  join_bb = e_vj->dest;
1599  join_bb->count = profile_count::from_gcov_type (all);
1600
1601  e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1602  e_ci->probability = prob;
1603
1604  e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
1605  e_cv->probability = prob.invert ();
1606
1607  remove_edge (e_iv);
1608
1609  e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
1610  e_ij->probability = profile_probability::always ();
1611
1612  e_vj->probability = profile_probability::always ();
1613
1614  /* Insert PHI node for the call result if necessary.  */
1615  if (gimple_call_lhs (vcall_stmt)
1616      && TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
1617    {
1618      tree result = gimple_call_lhs (vcall_stmt);
1619      gphi *phi = create_phi_node (result, join_bb);
1620      gimple_call_set_lhs (vcall_stmt,
1621			   duplicate_ssa_name (result, vcall_stmt));
1622      add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
1623      gimple_call_set_lhs (icall_stmt,
1624			   duplicate_ssa_name (result, icall_stmt));
1625      add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1626    }
1627
1628  /* Because these are all string op builtins, they're all nothrow.  */
1629  gcc_assert (!stmt_could_throw_p (cfun, vcall_stmt));
1630  gcc_assert (!stmt_could_throw_p (cfun, icall_stmt));
1631}
1632
/* Do the stringop transform on the statement pointed to by GSI if
   applicable: if one block size clearly dominates the profile, version the
   string operation for that constant size.  */

static bool
gimple_stringops_transform (gimple_stmt_iterator *gsi)
{
  gcall *stmt;
  tree blck_size;
  enum built_in_function fcode;
  histogram_value histogram;
  gcov_type count, all, val;
  tree dest, src;
  unsigned int dest_align, src_align;
  profile_probability prob;
  tree tree_val;
  int size_arg;

  stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
  if (!stmt)
    return false;

  if (!gimple_call_builtin_p (gsi_stmt (*gsi), BUILT_IN_NORMAL))
    return false;

  if (!interesting_stringop_to_profile_p (stmt, &size_arg))
    return false;

  blck_size = gimple_call_arg (stmt, size_arg);
  if (TREE_CODE (blck_size) == INTEGER_CST)
    return false;

  histogram = gimple_histogram_value_of_type (cfun, stmt,
					      HIST_TYPE_TOPN_VALUES);
  if (!histogram)
    return false;

  if (!get_nth_most_common_value (stmt, "stringops", histogram, &val, &count,
				  &all))
    return false;

  gimple_remove_histogram_value (cfun, stmt, histogram);

  /* We require that count is at least half of all.  */
  if (2 * count < all || optimize_bb_for_size_p (gimple_bb (stmt)))
    return false;
  if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
    return false;
  if (all > 0)
    prob = profile_probability::probability_in_gcov_type (count, all);
  else
    prob = profile_probability::never ();

  dest = gimple_call_arg (stmt, 0);
  dest_align = get_pointer_alignment (dest);
  fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
  switch (fcode)
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      src = gimple_call_arg (stmt, 1);
      src_align = get_pointer_alignment (src);
      if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
	return false;
      break;
    case BUILT_IN_MEMSET:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				gimple_call_arg (stmt, 1),
				dest_align, true))
	return false;
      break;
    case BUILT_IN_BZERO:
      if (!can_store_by_pieces (val, builtin_memset_read_str,
				integer_zero_node,
				dest_align, true))
	return false;
      break;
    default:
      gcc_unreachable ();
    }

  if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
    tree_val = build_int_cst (get_gcov_type (), val);
  else
    {
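      /* gcov_type does not fit in a single HOST_WIDE_INT here, so build
	 the constant from the low and high halves of VAL via wide_int.  */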
      HOST_WIDE_INT a[2];
      a[0] = (unsigned HOST_WIDE_INT) val;
      a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;

      tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
	TYPE_PRECISION (get_gcov_type ()), false));
    }

  if (dump_enabled_p ())
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
		     "Transformation done: single value %i stringop for %s\n",
		     (int)val, built_in_names[(int)fcode]);

  gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);

  return true;
}

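/* Compute the expected block size and destination alignment for the
   string operation STMT from its AVERAGE and IOR histograms.  Store the
   expected size in *EXPECTED_SIZE (-1 when unknown) and the expected
   alignment in bits in *EXPECTED_ALIGN (0 when unknown).  The consumed
   histograms are removed from STMT.  */
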
void
stringop_block_profile (gimple *stmt, unsigned int *expected_align,
			HOST_WIDE_INT *expected_size)
{
  histogram_value histogram;
  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);

  if (!histogram)
    *expected_size = -1;
  else if (!histogram->hvalue.counters[1])
    {
      *expected_size = -1;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
  else
    {
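      /* The AVERAGE histogram holds the sum of the profiled block sizes
	 in counters[0] and the number of executions in counters[1];
	 compute the rounded average.  */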
      gcov_type size;
      size = ((histogram->hvalue.counters[0]
	      + histogram->hvalue.counters[1] / 2)
	       / histogram->hvalue.counters[1]);
      /* Even if SIZE can hold a bigger value, INT_MAX is a safe
	 "infinity" for code generation strategies.  */
      if (size > INT_MAX)
	size = INT_MAX;
      *expected_size = size;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }

  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);

  if (!histogram)
    *expected_align = 0;
  else if (!histogram->hvalue.counters[0])
    {
      gimple_remove_histogram_value (cfun, stmt, histogram);
      *expected_align = 0;
    }
  else
    {
      gcov_type count;
      unsigned int alignment;

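      /* The IOR histogram accumulates the bitwise OR of every
	 destination address seen at run time; its lowest set bit is
	 therefore a lower bound on the common alignment in bytes.  */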
      count = histogram->hvalue.counters[0];
      alignment = 1;
      while (!(count & alignment)
	     && (alignment <= UINT_MAX / 2 / BITS_PER_UNIT))
	alignment <<= 1;
      *expected_align = alignment * BITS_PER_UNIT;
      gimple_remove_histogram_value (cfun, stmt, histogram);
    }
}


/* Find values inside STMT for which we want to measure histograms for
   division/modulo optimization.  */
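
/* As a rough sketch, for an unsigned "lhs = a % b" with an SSA_NAME
   divisor this requests

     a TOPN_VALUES histogram on b    (is the divisor usually one value?),
     a POW2 histogram on b	     (is it usually a power of two?), and
     an INTERVAL histogram on a / b  (is the quotient usually 0 or 1?),

   while a division only requests the TOPN_VALUES histogram.  */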

static void
gimple_divmod_values_to_profile (gimple *stmt, histogram_values *values)
{
  tree lhs, divisor, op0, type;
  histogram_value hist;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return;

  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);
  if (!INTEGRAL_TYPE_P (type))
    return;

  switch (gimple_assign_rhs_code (stmt))
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      divisor = gimple_assign_rhs2 (stmt);
      op0 = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (divisor) == SSA_NAME)
	/* Check for the case where the divisor is the same value most
	   of the time.  */
	values->safe_push (gimple_alloc_histogram_value (cfun,
							 HIST_TYPE_TOPN_VALUES,
							 stmt, divisor));

      /* For modulo, check whether it is often a no-op (or replaceable by
	 a few subtractions).  */
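      /* E.g. if OP0 / DIVISOR is usually 0 or 1, then OP0 % DIVISOR is
	 OP0 itself or OP0 - DIVISOR; the interval histogram requested
	 below counts how often the quotient is 0 or 1.  */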
      if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type)
	  && TREE_CODE (divisor) == SSA_NAME)
	{
          tree val;
          /* Check for a special case where the divisor is a power of 2.  */
	  values->safe_push (gimple_alloc_histogram_value (cfun,
							   HIST_TYPE_POW2,
							   stmt, divisor));
	  val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
	  hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
					       stmt, val);
	  hist->hdata.intvl.int_start = 0;
	  hist->hdata.intvl.steps = 2;
	  values->safe_push (hist);
	}
      return;

    default:
      return;
    }
}

/* Find calls inside STMT for which we want to measure histograms for
   indirect/virtual call optimization.  */

static void
gimple_indirect_call_to_profile (gimple *stmt, histogram_values *values)
{
  tree callee;

  if (gimple_code (stmt) != GIMPLE_CALL
      || gimple_call_internal_p (stmt)
      || gimple_call_fndecl (stmt) != NULL_TREE)
    return;

  callee = gimple_call_fn (stmt);
  histogram_value v = gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
						    stmt, callee);
  values->safe_push (v);

  return;
}

/* Find values inside STMT for which we want to measure histograms for
   string operations.  */

static void
gimple_stringops_values_to_profile (gimple *gs, histogram_values *values)
{
  gcall *stmt;
  tree blck_size;
  tree dest;
  int size_arg;

  stmt = dyn_cast <gcall *> (gs);
  if (!stmt)
    return;

  if (!gimple_call_builtin_p (gs, BUILT_IN_NORMAL))
    return;

  if (!interesting_stringop_to_profile_p (stmt, &size_arg))
    return;

  dest = gimple_call_arg (stmt, 0);
  blck_size = gimple_call_arg (stmt, size_arg);

  if (TREE_CODE (blck_size) != INTEGER_CST)
    {
      values->safe_push (gimple_alloc_histogram_value (cfun,
						       HIST_TYPE_TOPN_VALUES,
						       stmt, blck_size));
      values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
						       stmt, blck_size));
      values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
						       stmt, dest));
    }
}

/* Find values inside STMT for which we want to measure histograms, and
   add them to the list VALUES.  */

static void
gimple_values_to_profile (gimple *stmt, histogram_values *values)
{
  gimple_divmod_values_to_profile (stmt, values);
  gimple_stringops_values_to_profile (stmt, values);
  gimple_indirect_call_to_profile (stmt, values);
}

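/* Walk the current function and find all values for which we want to
   measure histograms, appending them to *VALUES together with a
   time-profile entry, and set the number of counters required for each
   histogram type.  */
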
void
gimple_find_values_to_profile (histogram_values *values)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  unsigned i;
  histogram_value hist = NULL;
  values->create (0);

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      gimple_values_to_profile (gsi_stmt (gsi), values);

  values->safe_push (gimple_alloc_histogram_value (cfun,
						   HIST_TYPE_TIME_PROFILE));

  FOR_EACH_VEC_ELT (*values, i, hist)
    {
      switch (hist->type)
        {
	case HIST_TYPE_INTERVAL:
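	  /* One counter for each value in the tracked interval, plus
	     one for values below it and one for values above it.  */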
	  hist->n_counters = hist->hdata.intvl.steps + 2;
	  break;

	case HIST_TYPE_POW2:
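	  /* Two counters: how often the value was a power of two and
	     how often it was not.  */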
	  hist->n_counters = 2;
	  break;

	case HIST_TYPE_TOPN_VALUES:
	case HIST_TYPE_INDIR_CALL:
	  hist->n_counters = GCOV_TOPN_MEM_COUNTERS;
	  break;

        case HIST_TYPE_TIME_PROFILE:
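          /* A single counter recording the order in which the function
             was first executed.  */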
          hist->n_counters = 1;
          break;

	case HIST_TYPE_AVERAGE:
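	  /* Two counters: the running sum of the measured values and
	     the number of measurements.  */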
	  hist->n_counters = 2;
	  break;

	case HIST_TYPE_IOR:
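	  /* A single counter accumulating the bitwise OR of all
	     measured values.  */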
	  hist->n_counters = 1;
	  break;

	default:
	  gcc_unreachable ();
	}
      if (dump_file && hist->hvalue.stmt != NULL)
        {
	  fprintf (dump_file, "Stmt ");
          print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
	  dump_histogram_value (dump_file, hist);
        }
    }
}
