/* Code sinking for trees
   Copyright (C) 2001, 2002, 2003, 2004, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"

/* TODO:
   1. Sinking stores using only scalar promotion (i.e. without moving the RHS):

   *q = p;
   p = p + 1;
   if (something)
     *q = <not p>;
   else
     y = *q;


   should become
   sinktemp = p;
   p = p + 1;
   if (something)
     *q = <not p>;
   else
   {
     *q = sinktemp;
     y = *q;
   }
   Store copy propagation will take care of the store elimination above.


   2. Sinking using Partial Dead Code Elimination.  */


static struct
{
  /* The number of statements sunk down the flowgraph by code sinking.  */
  int sunk;

} sink_stats;


/* Given a PHI and one of its arguments (DEF), find the edge for
   that argument and return its source block.  If the argument occurs
   more than once in the PHI node, return NULL.  */

static basic_block
find_bb_for_arg (gimple phi, tree def)
{
  size_t i;
  bool foundone = false;
  basic_block result = NULL;
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    if (PHI_ARG_DEF (phi, i) == def)
      {
        if (foundone)
          return NULL;
        foundone = true;
        result = gimple_phi_arg_edge (phi, i)->src;
      }
  return result;
}

/* Return true if all the non-debug immediate uses of the defs of STMT
   occur in the same statement.
   We could also handle the case where the first immediate use is in a
   PHI node, and all the other uses are in PHIs in the same basic block,
   but this requires some expensive checking later (you have to make sure
   no def/vdef in the statement occurs for multiple edges in the various
   PHI nodes it's used in, so that you only have one place you can sink
   it to).  */

static bool
all_immediate_uses_same_place (gimple stmt)
{
  gimple firstuse = NULL;
  ssa_op_iter op_iter;
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  tree var;

  FOR_EACH_SSA_TREE_OPERAND (var, stmt, op_iter, SSA_OP_ALL_DEFS)
    {
      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
        {
          if (is_gimple_debug (USE_STMT (use_p)))
            continue;
          if (firstuse == NULL)
            firstuse = USE_STMT (use_p);
          else
            if (firstuse != USE_STMT (use_p))
              return false;
        }
    }

  return true;
}

/* Some global stores don't necessarily have VDEFs of global variables,
   but we still must avoid moving them around.  */

bool
is_hidden_global_store (gimple stmt)
{
  /* Check virtual definitions.  If we get here, the only virtual
     definitions we should see are those generated by assignment or call
     statements.  */
  if (gimple_vdef (stmt))
    {
      tree lhs;

      gcc_assert (is_gimple_assign (stmt) || is_gimple_call (stmt));

      /* Note that we must not check the individual virtual operands
         here.  In particular, if this is an aliased store, we could
         end up with something like the following (SSA notation
         redacted for brevity):

                foo (int *p, int i)
                {
                  int x;
                  p_1 = (i_2 > 3) ? &x : p;

                  # x_4 = VDEF <x_3>
                  *p_1 = 5;

                  return 2;
                }

         Notice that the store to '*p_1' should be preserved; if we
         were to check the virtual definitions in that store, we would
         not mark it needed.  This is because 'x' is not a global
         variable.

         Therefore, we check the base address of the LHS.  If the
         address is a pointer, we check whether it may point to a
         global variable.  Otherwise, we check whether the base
         variable is a global.  */
      lhs = gimple_get_lhs (stmt);

      if (REFERENCE_CLASS_P (lhs))
        lhs = get_base_address (lhs);

      if (lhs == NULL_TREE)
        {
          /* If LHS is NULL, it means that we couldn't get the base
             address of the reference.  In which case, we should not
             move this store.  */
          return true;
        }
      else if (DECL_P (lhs))
        {
          /* If the store is to a global symbol, we need to keep it.  */
          if (is_global_var (lhs))
            return true;

        }
      else if (INDIRECT_REF_P (lhs))
        return ptr_deref_may_alias_global_p (TREE_OPERAND (lhs, 0));
      else
        gcc_unreachable ();
    }

  return false;
}

/* Find the nearest common dominator of all of the immediate uses of the
   defs of STMT.  Set *DEBUG_STMTS to true if any of those uses is a debug
   statement.  Return NULL if a use would have to be placed in the entry
   block.  */

static basic_block
nearest_common_dominator_of_uses (gimple stmt, bool *debug_stmts)
{
  bitmap blocks = BITMAP_ALLOC (NULL);
  basic_block commondom;
  unsigned int j;
  bitmap_iterator bi;
  ssa_op_iter op_iter;
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  tree var;

  bitmap_clear (blocks);
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, op_iter, SSA_OP_ALL_DEFS)
    {
      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
        {
          gimple usestmt = USE_STMT (use_p);
          basic_block useblock;

          if (gimple_code (usestmt) == GIMPLE_PHI)
            {
              int idx = PHI_ARG_INDEX_FROM_USE (use_p);

              useblock = gimple_phi_arg_edge (usestmt, idx)->src;
            }
          else if (is_gimple_debug (usestmt))
            {
              *debug_stmts = true;
              continue;
            }
          else
            {
              useblock = gimple_bb (usestmt);
            }

          /* Short circuit. Nothing dominates the entry block.  */
          if (useblock == ENTRY_BLOCK_PTR)
            {
              BITMAP_FREE (blocks);
              return NULL;
            }
          bitmap_set_bit (blocks, useblock->index);
        }
    }
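
  /* Fold all the recorded use blocks into their nearest common dominator,
     seeding with the first block in the bitmap.  */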
  commondom = BASIC_BLOCK (bitmap_first_set_bit (blocks));
  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, j, bi)
    commondom = nearest_common_dominator (CDI_DOMINATORS, commondom,
                                          BASIC_BLOCK (j));
  BITMAP_FREE (blocks);
  return commondom;
}

/* Given a statement (STMT) and the basic block it is currently in (FROMBB),
   determine the location to sink the statement to, if any.
   Return true if there is such a location; in that case, TOGSI points to
   the statement before which STMT should be moved.  */

static bool
statement_sink_location (gimple stmt, basic_block frombb,
                         gimple_stmt_iterator *togsi)
{
  gimple use;
  tree def;
  use_operand_p one_use = NULL_USE_OPERAND_P;
  basic_block sinkbb;
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  imm_use_iterator imm_iter;

  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      FOR_EACH_IMM_USE_FAST (one_use, imm_iter, def)
        {
          if (is_gimple_debug (USE_STMT (one_use)))
            continue;

          break;
        }
      if (one_use != NULL_USE_OPERAND_P)
        break;
    }

  /* Return if there are no immediate uses of this stmt.  */
  if (one_use == NULL_USE_OPERAND_P)
    return false;

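  /* Only assignments are candidates for sinking.  */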
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  /* There are a few classes of things we can't or don't move, some because we
     don't have code to handle it, some because it's not profitable and some
     because it's not legal.

     We can't sink things that may be global stores, at least not without
     calculating a lot more information, because we may cause it to no longer
     be seen by an external routine that needs it depending on where it gets
     moved to.

     We don't want to sink loads from memory.

     We can't sink statements that end basic blocks without splitting the
     incoming edge for the sink location to place it there.

     We can't sink statements that have volatile operands.

     We don't want to sink dead code, so anything with 0 immediate uses is not
     sunk.

     Don't sink BLKmode assignments if current function has any local explicit
     register variables, as BLKmode assignments may involve memcpy or memset
     calls or, on some targets, inline expansion thereof that sometimes need
     to use specific hard registers.  */
  if (stmt_ends_bb_p (stmt)
      || gimple_has_side_effects (stmt)
      || is_hidden_global_store (stmt)
      || gimple_has_volatile_ops (stmt)
      || gimple_vuse (stmt)
      || (cfun->has_local_explicit_reg_vars
          && TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt))) == BLKmode))
    return false;

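  /* Don't sink statements that define a global user variable or an SSA
     name that occurs in an abnormal PHI node.  */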
  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (def_p);
      if (is_global_var (SSA_NAME_VAR (def))
          || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
        return false;
    }

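  /* Likewise, don't sink statements that use an SSA name occurring in an
     abnormal PHI node.  */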
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
    {
      tree use = USE_FROM_PTR (use_p);
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use))
        return false;
    }

  /* If all the immediate uses are not in the same place, find the nearest
     common dominator of all the immediate uses.  For PHI nodes, we have to
     find the nearest common dominator of all of the predecessor blocks, since
     that is where insertion would have to take place.  */
  if (!all_immediate_uses_same_place (stmt))
    {
      bool debug_stmts = false;
      basic_block commondom = nearest_common_dominator_of_uses (stmt,
                                                                &debug_stmts);

      if (commondom == frombb)
        return false;

      /* Our common dominator has to be dominated by frombb in order to be a
         trivially safe place to put this statement, since it has multiple
         uses.  */
      if (!dominated_by_p (CDI_DOMINATORS, commondom, frombb))
        return false;

      /* It doesn't make sense to move to a dominator that post-dominates
         frombb, because that would move the statement into a path that always
         executes when frombb executes, instead of reducing the number of
         executions.  */
      if (dominated_by_p (CDI_POST_DOMINATORS, frombb, commondom))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Not moving store, common dominator post-dominates from block.\n");
          return false;
        }

      if (commondom == frombb || commondom->loop_depth > frombb->loop_depth)
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Common dominator of all uses is %d\n",
                   commondom->index);
        }

      *togsi = gsi_after_labels (commondom);

      return true;
    }

  use = USE_STMT (one_use);
  if (gimple_code (use) != GIMPLE_PHI)
    {
      sinkbb = gimple_bb (use);
      if (sinkbb == frombb || sinkbb->loop_depth > frombb->loop_depth
          || sinkbb->loop_father != frombb->loop_father)
        return false;

      /* Moving the expression to a post dominator can't reduce the number of
         executions.  */
      if (dominated_by_p (CDI_POST_DOMINATORS, frombb, sinkbb))
        return false;

      *togsi = gsi_for_stmt (use);

      return true;
    }

  /* Note that at this point, all uses must be in the same statement, so it
     doesn't matter which def op we choose; pick the first one.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    break;

  sinkbb = find_bb_for_arg (use, def);
  if (!sinkbb)
    return false;

  /* This will happen when you have
     a_3 = PHI <a_13, a_26>

     a_26 = VDEF <a_3>

     If the use is a phi, and is in the same bb as the def,
     we can't sink it.  */

  if (gimple_bb (use) == frombb)
    return false;
  if (sinkbb == frombb || sinkbb->loop_depth > frombb->loop_depth
      || sinkbb->loop_father != frombb->loop_father)
    return false;

  /* Moving the expression to a post dominator can't reduce the number of
     executions.  */
  if (dominated_by_p (CDI_POST_DOMINATORS, frombb, sinkbb))
    return false;

  *togsi = gsi_after_labels (sinkbb);

  return true;
}

/* Perform code sinking on BB.  */

static void
sink_code_in_bb (basic_block bb)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  edge_iterator ei;
  edge e;
  bool last = true;

  /* If this block doesn't dominate anything, there can't be any place to sink
     the statements to.  */
  if (first_dom_son (CDI_DOMINATORS, bb) == NULL)
    goto earlyout;

  /* We can't move things across abnormal edges, so don't try.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      goto earlyout;

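  /* Walk the statements of BB from last to first, trying to find a sink
     location for each one.  */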
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      gimple_stmt_iterator togsi;

      if (!statement_sink_location (stmt, bb, &togsi))
        {
          if (!gsi_end_p (gsi))
            gsi_prev (&gsi);
          last = false;
          continue;
        }
      if (dump_file)
        {
          fprintf (dump_file, "Sinking ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS);
          fprintf (dump_file, " from bb %d to bb %d\n",
                   bb->index, (gsi_bb (togsi))->index);
        }

      /* If the sink location is the end of a basic block, we need to insert
         at the end of that basic block.  */
      if (gsi_end_p (togsi))
        gsi_move_to_bb_end (&gsi, gsi_bb (togsi));
      else
        gsi_move_before (&gsi, &togsi);

      sink_stats.sunk++;

      /* If we've just removed the last statement of the BB, the
         gsi_end_p() test below would fail, but gsi_prev() would have
         succeeded, and we want it to succeed.  So we keep track of
         whether we're at the last statement and pick up the new last
         statement.  */
      if (last)
        {
          gsi = gsi_last_bb (bb);
          continue;
        }

      last = false;
      if (!gsi_end_p (gsi))
        gsi_prev (&gsi);

    }
 earlyout:
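  /* Recurse into the sons of BB in the post-dominator tree; the walk over
     the whole function starts at the exit block.  */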
  for (son = first_dom_son (CDI_POST_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    {
      sink_code_in_bb (son);
    }
}

/* Perform code sinking.
   This moves code down the flowgraph when we know it would be
   profitable to do so, or it wouldn't increase the number of
   executions of the statement.

   For example, given

   a_1 = b + c;
   if (<something>)
   {
   }
   else
   {
     foo (&b, &c);
     a_5 = b + c;
   }
   a_6 = PHI (a_5, a_1);
   USE a_6.

   we'll transform this into:

   if (<something>)
   {
      a_1 = b + c;
   }
   else
   {
      foo (&b, &c);
      a_5 = b + c;
   }
   a_6 = PHI (a_5, a_1);
   USE a_6.

   Note that this reduces the number of computations of a = b + c to 1
   when we take the else edge, instead of 2.  */
static void
execute_sink_code (void)
{
  loop_optimizer_init (LOOPS_NORMAL);

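  /* Add fake edges from infinite loops to the exit block so that the
     post-dominator walk started from the exit block reaches every block.  */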
  connect_infinite_loops_to_exit ();
  memset (&sink_stats, 0, sizeof (sink_stats));
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  sink_code_in_bb (EXIT_BLOCK_PTR);
  statistics_counter_event (cfun, "Sunk statements", sink_stats.sunk);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  loop_optimizer_finalize ();
}

/* Gate and execute functions for the code sinking pass.  */

static unsigned int
do_sink (void)
{
  execute_sink_code ();
  return 0;
}

static bool
gate_sink (void)
{
  return flag_tree_sink != 0;
}

struct gimple_opt_pass pass_sink_code =
{
 {
  GIMPLE_PASS,
  "sink",				/* name */
  gate_sink,				/* gate */
  do_sink,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SINK,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa
    | TODO_dump_func
    | TODO_ggc_collect
    | TODO_verify_ssa			/* todo_flags_finish */
 }
};