1/* Generic SSA value propagation engine.
2   Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3   Contributed by Diego Novillo <dnovillo@redhat.com>
4
5   This file is part of GCC.
6
7   GCC is free software; you can redistribute it and/or modify it
8   under the terms of the GNU General Public License as published by the
9   Free Software Foundation; either version 2, or (at your option) any
10   later version.
11
12   GCC is distributed in the hope that it will be useful, but WITHOUT
13   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15   for more details.
16
17   You should have received a copy of the GNU General Public License
18   along with GCC; see the file COPYING.  If not, write to the Free
19   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20   02110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "flags.h"
28#include "rtl.h"
29#include "tm_p.h"
30#include "ggc.h"
31#include "basic-block.h"
32#include "output.h"
33#include "expr.h"
34#include "function.h"
35#include "diagnostic.h"
36#include "timevar.h"
37#include "tree-dump.h"
38#include "tree-flow.h"
39#include "tree-pass.h"
40#include "tree-ssa-propagate.h"
41#include "langhooks.h"
42#include "varray.h"
43#include "vec.h"
44
45/* This file implements a generic value propagation engine based on
46   the same propagation used by the SSA-CCP algorithm [1].
47
48   Propagation is performed by simulating the execution of every
49   statement that produces the value being propagated.  Simulation
50   proceeds as follows:
51
52   1- Initially, all edges of the CFG are marked not executable and
53      the CFG worklist is seeded with all the statements in the entry
54      basic block (block 0).
55
56   2- Every statement S is simulated with a call to the call-back
57      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
58      results:
59
60      	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
61	    interest and does not affect any of the work lists.
62
63	SSA_PROP_VARYING: The value produced by S cannot be determined
64	    at compile time.  Further simulation of S is not required.
65	    If S is a conditional jump, all the outgoing edges for the
66	    block are considered executable and added to the work
67	    list.
68
69	SSA_PROP_INTERESTING: S produces a value that can be computed
70	    at compile time.  Its result can be propagated into the
71	    statements that feed from S.  Furthermore, if S is a
72	    conditional jump, only the edge known to be taken is added
73	    to the work list.  Edges that are known not to execute are
74	    never simulated.
75
76   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
77      return value from SSA_PROP_VISIT_PHI has the same semantics as
78      described in #2.
79
80   4- Three work lists are kept.  Statements are only added to these
81      lists if they produce one of SSA_PROP_INTERESTING or
82      SSA_PROP_VARYING.
83
84   	CFG_BLOCKS contains the list of blocks to be simulated.
85	    Blocks are added to this list if their incoming edges are
86	    found executable.
87
88	VARYING_SSA_EDGES contains the list of statements that feed
89	    from statements that produce an SSA_PROP_VARYING result.
90	    These are simulated first to speed up processing.
91
92	INTERESTING_SSA_EDGES contains the list of statements that
93	    feed from statements that produce an SSA_PROP_INTERESTING
94	    result.
95
96   5- Simulation terminates when all three work lists are drained.
97
98   Before calling ssa_propagate, it is important to clear
99   DONT_SIMULATE_AGAIN for all the statements in the program that
100   should be simulated.  This initialization allows an implementation
101   to specify which statements should never be simulated.
102
103   It is also important to compute def-use information before calling
104   ssa_propagate.
105
106   References:
107
108     [1] Constant propagation with conditional branches,
109         Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
110
111     [2] Building an Optimizing Compiler,
112	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
113
114     [3] Advanced Compiler Design and Implementation,
115	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
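
/* For illustration only, a minimal (hypothetical) client of this engine
   looks roughly like the sketch below.  The names EXAMPLE_VISIT_STMT,
   EXAMPLE_VISIT_PHI and EXAMPLE_PASS are made up for this sketch; real
   clients of the engine are passes such as CCP (tree-ssa-ccp.c) and
   VRP (tree-vrp.c):

	static enum ssa_prop_result
	example_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
	{
	  ... evaluate STMT; if it computes a useful value, store the SSA
	      name it defines in *OUTPUT_P and return SSA_PROP_INTERESTING;
	      if the value cannot be determined at compile time, return
	      SSA_PROP_VARYING; otherwise return SSA_PROP_NOT_INTERESTING ...
	}

	static enum ssa_prop_result
	example_visit_phi (tree phi)
	{
	  ... merge the lattice values of the PHI arguments and return one
	      of the three results described in #2 and #3 above ...
	}

	static void
	example_pass (void)
	{
	  ... clear DONT_SIMULATE_AGAIN for every statement that should be
	      simulated and make sure SSA operand (def-use) information is
	      up to date ...

	  ssa_propagate (example_visit_stmt, example_visit_phi);

	  ... substitute the values collected by the callbacks, e.g. with
	      substitute_and_fold ...
	}  */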
116
117/* Function pointers used to parameterize the propagation engine.  */
118static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
119static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
120
/* Use the TREE_DEPRECATED bitflag to mark statements that have been
   added to one of the SSA edges worklists.  This flag is used to
   avoid visiting statements unnecessarily when draining an SSA edge
   worklist.  If, while simulating a basic block, we find a statement
   with STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
   processing from visiting it again.  */
127#define STMT_IN_SSA_EDGE_WORKLIST(T)	TREE_DEPRECATED (T)
128
129/* A bitmap to keep track of executable blocks in the CFG.  */
130static sbitmap executable_blocks;
131
132/* Array of control flow edges on the worklist.  */
133static VEC(basic_block,heap) *cfg_blocks;
134
135static unsigned int cfg_blocks_num = 0;
136static int cfg_blocks_tail;
137static int cfg_blocks_head;
138
139static sbitmap bb_in_list;
140
141/* Worklist of SSA edges which will need reexamination as their
142   definition has changed.  SSA edges are def-use edges in the SSA
143   web.  For each D-U edge, we store the target statement or PHI node
144   U.  */
145static GTY(()) VEC(tree,gc) *interesting_ssa_edges;
146
147/* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
148   list of SSA edges is split into two.  One contains all SSA edges
   that need to be reexamined because their lattice value changed to
150   varying (this worklist), and the other contains all other SSA edges
151   to be reexamined (INTERESTING_SSA_EDGES).
152
153   Since most values in the program are VARYING, the ideal situation
154   is to move them to that lattice value as quickly as possible.
155   Thus, it doesn't make sense to process any other type of lattice
156   value until all VARYING values are propagated fully, which is one
157   thing using the VARYING worklist achieves.  In addition, if we
158   don't use a separate worklist for VARYING edges, we end up with
159   situations where lattice values move from
160   UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
161static GTY(()) VEC(tree,gc) *varying_ssa_edges;
162
163
/* Return true if the block worklist is empty.  */
165
166static inline bool
167cfg_blocks_empty_p (void)
168{
169  return (cfg_blocks_num == 0);
170}
171
172
/* Add a basic block to the worklist.  The block must not already be in
   the worklist, and it must not be the ENTRY or EXIT block.  */
175
176static void
177cfg_blocks_add (basic_block bb)
178{
179  gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
180  gcc_assert (!TEST_BIT (bb_in_list, bb->index));
181
182  if (cfg_blocks_empty_p ())
183    {
184      cfg_blocks_tail = cfg_blocks_head = 0;
185      cfg_blocks_num = 1;
186    }
187  else
188    {
189      cfg_blocks_num++;
190      if (cfg_blocks_num > VEC_length (basic_block, cfg_blocks))
191	{
	  /* We have to grow the array now.  Adjust the queue to occupy
	     the full space of the original array.  We do not need to
	     initialize the newly allocated portion of the array
	     because we keep track of CFG_BLOCKS_HEAD and
	     CFG_BLOCKS_TAIL.  */
197	  cfg_blocks_tail = VEC_length (basic_block, cfg_blocks);
198	  cfg_blocks_head = 0;
199	  VEC_safe_grow (basic_block, heap, cfg_blocks, 2 * cfg_blocks_tail);
200	}
201      else
202	cfg_blocks_tail = ((cfg_blocks_tail + 1)
203			   % VEC_length (basic_block, cfg_blocks));
204    }
205
206  VEC_replace (basic_block, cfg_blocks, cfg_blocks_tail, bb);
207  SET_BIT (bb_in_list, bb->index);
208}
209
210
211/* Remove a block from the worklist.  */
212
213static basic_block
214cfg_blocks_get (void)
215{
216  basic_block bb;
217
218  bb = VEC_index (basic_block, cfg_blocks, cfg_blocks_head);
219
220  gcc_assert (!cfg_blocks_empty_p ());
221  gcc_assert (bb);
222
223  cfg_blocks_head = ((cfg_blocks_head + 1)
224		     % VEC_length (basic_block, cfg_blocks));
225  --cfg_blocks_num;
226  RESET_BIT (bb_in_list, bb->index);
227
228  return bb;
229}
230
231
232/* We have just defined a new value for VAR.  If IS_VARYING is true,
233   add all immediate uses of VAR to VARYING_SSA_EDGES, otherwise add
234   them to INTERESTING_SSA_EDGES.  */
235
236static void
237add_ssa_edge (tree var, bool is_varying)
238{
239  imm_use_iterator iter;
240  use_operand_p use_p;
241
242  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
243    {
244      tree use_stmt = USE_STMT (use_p);
245
246      if (!DONT_SIMULATE_AGAIN (use_stmt)
247	  && !STMT_IN_SSA_EDGE_WORKLIST (use_stmt))
248	{
249	  STMT_IN_SSA_EDGE_WORKLIST (use_stmt) = 1;
250	  if (is_varying)
251	    VEC_safe_push (tree, gc, varying_ssa_edges, use_stmt);
252	  else
253	    VEC_safe_push (tree, gc, interesting_ssa_edges, use_stmt);
254	}
255    }
256}
257
258
259/* Add edge E to the control flow worklist.  */
260
261static void
262add_control_edge (edge e)
263{
264  basic_block bb = e->dest;
265  if (bb == EXIT_BLOCK_PTR)
266    return;
267
  /* If the edge has already been marked executable, skip it.  */
269  if (e->flags & EDGE_EXECUTABLE)
270    return;
271
272  e->flags |= EDGE_EXECUTABLE;
273
274  /* If the block is already in the list, we're done.  */
275  if (TEST_BIT (bb_in_list, bb->index))
276    return;
277
278  cfg_blocks_add (bb);
279
280  if (dump_file && (dump_flags & TDF_DETAILS))
281    fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
282	e->src->index, e->dest->index);
283}
284
285
286/* Simulate the execution of STMT and update the work lists accordingly.  */
287
288static void
289simulate_stmt (tree stmt)
290{
291  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
292  edge taken_edge = NULL;
293  tree output_name = NULL_TREE;
294
295  /* Don't bother visiting statements that are already
296     considered varying by the propagator.  */
297  if (DONT_SIMULATE_AGAIN (stmt))
298    return;
299
300  if (TREE_CODE (stmt) == PHI_NODE)
301    {
302      val = ssa_prop_visit_phi (stmt);
303      output_name = PHI_RESULT (stmt);
304    }
305  else
306    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);
307
308  if (val == SSA_PROP_VARYING)
309    {
310      DONT_SIMULATE_AGAIN (stmt) = 1;
311
312      /* If the statement produced a new varying value, add the SSA
313	 edges coming out of OUTPUT_NAME.  */
314      if (output_name)
315	add_ssa_edge (output_name, true);
316
317      /* If STMT transfers control out of its basic block, add
318	 all outgoing edges to the work list.  */
319      if (stmt_ends_bb_p (stmt))
320	{
321	  edge e;
322	  edge_iterator ei;
323	  basic_block bb = bb_for_stmt (stmt);
324	  FOR_EACH_EDGE (e, ei, bb->succs)
325	    add_control_edge (e);
326	}
327    }
328  else if (val == SSA_PROP_INTERESTING)
329    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
332      if (output_name)
333	add_ssa_edge (output_name, false);
334
335      /* If we know which edge is going to be taken out of this block,
336	 add it to the CFG work list.  */
337      if (taken_edge)
338	add_control_edge (taken_edge);
339    }
340}
341
342/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
343   drain.  This pops statements off the given WORKLIST and processes
344   them until there are no more statements on WORKLIST.
345   We take a pointer to WORKLIST because it may be reallocated when an
346   SSA edge is added to it in simulate_stmt.  */
347
348static void
349process_ssa_edge_worklist (VEC(tree,gc) **worklist)
350{
351  /* Drain the entire worklist.  */
352  while (VEC_length (tree, *worklist) > 0)
353    {
354      basic_block bb;
355
356      /* Pull the statement to simulate off the worklist.  */
357      tree stmt = VEC_pop (tree, *worklist);
358
359      /* If this statement was already visited by simulate_block, then
360	 we don't need to visit it again here.  */
361      if (!STMT_IN_SSA_EDGE_WORKLIST (stmt))
362	continue;
363
364      /* STMT is no longer in a worklist.  */
365      STMT_IN_SSA_EDGE_WORKLIST (stmt) = 0;
366
367      if (dump_file && (dump_flags & TDF_DETAILS))
368	{
369	  fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
370	  print_generic_stmt (dump_file, stmt, dump_flags);
371	}
372
373      bb = bb_for_stmt (stmt);
374
375      /* PHI nodes are always visited, regardless of whether or not
376	 the destination block is executable.  Otherwise, visit the
377	 statement only if its block is marked executable.  */
378      if (TREE_CODE (stmt) == PHI_NODE
379	  || TEST_BIT (executable_blocks, bb->index))
380	simulate_stmt (stmt);
381    }
382}
383
384
385/* Simulate the execution of BLOCK.  Evaluate the statement associated
386   with each variable reference inside the block.  */
387
388static void
389simulate_block (basic_block block)
390{
391  tree phi;
392
393  /* There is nothing to do for the exit block.  */
394  if (block == EXIT_BLOCK_PTR)
395    return;
396
397  if (dump_file && (dump_flags & TDF_DETAILS))
398    fprintf (dump_file, "\nSimulating block %d\n", block->index);
399
400  /* Always simulate PHI nodes, even if we have simulated this block
401     before.  */
402  for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
403    simulate_stmt (phi);
404
405  /* If this is the first time we've simulated this block, then we
406     must simulate each of its statements.  */
407  if (!TEST_BIT (executable_blocks, block->index))
408    {
409      block_stmt_iterator j;
410      unsigned int normal_edge_count;
411      edge e, normal_edge;
412      edge_iterator ei;
413
414      /* Note that we have simulated this block.  */
415      SET_BIT (executable_blocks, block->index);
416
417      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
418	{
419	  tree stmt = bsi_stmt (j);
420
421	  /* If this statement is already in the worklist then
422	     "cancel" it.  The reevaluation implied by the worklist
423	     entry will produce the same value we generate here and
424	     thus reevaluating it again from the worklist is
425	     pointless.  */
426	  if (STMT_IN_SSA_EDGE_WORKLIST (stmt))
427	    STMT_IN_SSA_EDGE_WORKLIST (stmt) = 0;
428
429	  simulate_stmt (stmt);
430	}
431
      /* We cannot predict when abnormal edges will be executed, so
433	 once a block is considered executable, we consider any
434	 outgoing abnormal edges as executable.
435
436	 At the same time, if this block has only one successor that is
437	 reached by non-abnormal edges, then add that successor to the
438	 worklist.  */
439      normal_edge_count = 0;
440      normal_edge = NULL;
441      FOR_EACH_EDGE (e, ei, block->succs)
442	{
443	  if (e->flags & EDGE_ABNORMAL)
444	    add_control_edge (e);
445	  else
446	    {
447	      normal_edge_count++;
448	      normal_edge = e;
449	    }
450	}
451
452      if (normal_edge_count == 1)
453	add_control_edge (normal_edge);
454    }
455}
456
457
458/* Initialize local data structures and work lists.  */
459
460static void
461ssa_prop_init (void)
462{
463  edge e;
464  edge_iterator ei;
465  basic_block bb;
466  size_t i;
467
468  /* Worklists of SSA edges.  */
469  interesting_ssa_edges = VEC_alloc (tree, gc, 20);
470  varying_ssa_edges = VEC_alloc (tree, gc, 20);
471
472  executable_blocks = sbitmap_alloc (last_basic_block);
473  sbitmap_zero (executable_blocks);
474
475  bb_in_list = sbitmap_alloc (last_basic_block);
476  sbitmap_zero (bb_in_list);
477
478  if (dump_file && (dump_flags & TDF_DETAILS))
479    dump_immediate_uses (dump_file);
480
481  cfg_blocks = VEC_alloc (basic_block, heap, 20);
482  VEC_safe_grow (basic_block, heap, cfg_blocks, 20);
483
484  /* Initialize the values for every SSA_NAME.  */
485  for (i = 1; i < num_ssa_names; i++)
486    if (ssa_name (i))
487      SSA_NAME_VALUE (ssa_name (i)) = NULL_TREE;
488
  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of ENTRY_BLOCK_PTR).  */
491  FOR_ALL_BB (bb)
492    {
493      block_stmt_iterator si;
494
495      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
496	STMT_IN_SSA_EDGE_WORKLIST (bsi_stmt (si)) = 0;
497
498      FOR_EACH_EDGE (e, ei, bb->succs)
499	e->flags &= ~EDGE_EXECUTABLE;
500    }
501
502  /* Seed the algorithm by adding the successors of the entry block to the
503     edge worklist.  */
504  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
505    add_control_edge (e);
506}
507
508
509/* Free allocated storage.  */
510
511static void
512ssa_prop_fini (void)
513{
514  VEC_free (tree, gc, interesting_ssa_edges);
515  VEC_free (tree, gc, varying_ssa_edges);
516  VEC_free (basic_block, heap, cfg_blocks);
517  cfg_blocks = NULL;
518  sbitmap_free (bb_in_list);
519  sbitmap_free (executable_blocks);
520}
521
522
523/* Get the main expression from statement STMT.  */
524
525tree
526get_rhs (tree stmt)
527{
528  enum tree_code code = TREE_CODE (stmt);
529
530  switch (code)
531    {
532    case RETURN_EXPR:
533      stmt = TREE_OPERAND (stmt, 0);
534      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
535	return stmt;
536      /* FALLTHRU */
537
538    case MODIFY_EXPR:
539      stmt = TREE_OPERAND (stmt, 1);
540      if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
541	return TREE_OPERAND (stmt, 0);
542      else
543	return stmt;
544
545    case COND_EXPR:
546      return COND_EXPR_COND (stmt);
547    case SWITCH_EXPR:
548      return SWITCH_COND (stmt);
549    case GOTO_EXPR:
550      return GOTO_DESTINATION (stmt);
551    case LABEL_EXPR:
552      return LABEL_EXPR_LABEL (stmt);
553
554    default:
555      return stmt;
556    }
557}
558
559
/* Set the main expression of *STMT_P to EXPR.  If EXPR is not a valid
   GIMPLE expression, no changes are made and the function returns
   false.  */
563
564bool
565set_rhs (tree *stmt_p, tree expr)
566{
567  tree stmt = *stmt_p, op;
568  enum tree_code code = TREE_CODE (expr);
569  stmt_ann_t ann;
570  tree var;
571  ssa_op_iter iter;
572
573  /* Verify the constant folded result is valid gimple.  */
574  if (TREE_CODE_CLASS (code) == tcc_binary)
575    {
576      if (!is_gimple_val (TREE_OPERAND (expr, 0))
577	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
578	return false;
579    }
580  else if (TREE_CODE_CLASS (code) == tcc_unary)
581    {
582      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
583	return false;
584    }
585  else if (code == ADDR_EXPR)
586    {
587      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ARRAY_REF
588	  && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (expr, 0), 1)))
589	return false;
590    }
591  else if (code == COMPOUND_EXPR
592	   || code == MODIFY_EXPR)
593    return false;
594
595  if (EXPR_HAS_LOCATION (stmt)
596      && EXPR_P (expr)
597      && ! EXPR_HAS_LOCATION (expr)
598      && TREE_SIDE_EFFECTS (expr)
599      && TREE_CODE (expr) != LABEL_EXPR)
600    SET_EXPR_LOCATION (expr, EXPR_LOCATION (stmt));
601
602  switch (TREE_CODE (stmt))
603    {
604    case RETURN_EXPR:
605      op = TREE_OPERAND (stmt, 0);
606      if (TREE_CODE (op) != MODIFY_EXPR)
607	{
608	  TREE_OPERAND (stmt, 0) = expr;
609	  break;
610	}
611      stmt = op;
612      /* FALLTHRU */
613
614    case MODIFY_EXPR:
615      op = TREE_OPERAND (stmt, 1);
616      if (TREE_CODE (op) == WITH_SIZE_EXPR)
617	stmt = op;
618      TREE_OPERAND (stmt, 1) = expr;
619      break;
620
621    case COND_EXPR:
622      if (!is_gimple_condexpr (expr))
623        return false;
624      COND_EXPR_COND (stmt) = expr;
625      break;
626    case SWITCH_EXPR:
627      SWITCH_COND (stmt) = expr;
628      break;
629    case GOTO_EXPR:
630      GOTO_DESTINATION (stmt) = expr;
631      break;
632    case LABEL_EXPR:
633      LABEL_EXPR_LABEL (stmt) = expr;
634      break;
635
636    default:
637      /* Replace the whole statement with EXPR.  If EXPR has no side
638	 effects, then replace *STMT_P with an empty statement.  */
639      ann = stmt_ann (stmt);
640      *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
641      (*stmt_p)->common.ann = (tree_ann_t) ann;
642
643      if (in_ssa_p
644	  && TREE_SIDE_EFFECTS (expr))
645	{
646	  /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
647	     replacement.  */
648	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_DEFS)
649	    {
650	      if (TREE_CODE (var) == SSA_NAME)
651		SSA_NAME_DEF_STMT (var) = *stmt_p;
652	    }
653	}
654      break;
655    }
656
657  return true;
658}
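
/* For illustration only, a (hypothetical) caller typically combines
   get_rhs and set_rhs to fold a statement in place while walking a
   block with a block_stmt_iterator BSI:

	tree stmt = bsi_stmt (bsi);
	tree rhs = get_rhs (stmt);
	tree folded = fold (rhs);

	if (folded != rhs && set_rhs (bsi_stmt_ptr (bsi), folded))
	  ... the statement now uses FOLDED and its operand caches should
	      be refreshed, e.g. with update_stmt ...

   If set_rhs returns false, FOLDED was not a valid GIMPLE expression
   and the statement is left unchanged.  */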
659
660
661/* Entry point to the propagation engine.
662
663   VISIT_STMT is called for every statement visited.
664   VISIT_PHI is called for every PHI node visited.  */
665
666void
667ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
668	       ssa_prop_visit_phi_fn visit_phi)
669{
670  ssa_prop_visit_stmt = visit_stmt;
671  ssa_prop_visit_phi = visit_phi;
672
673  ssa_prop_init ();
674
675  /* Iterate until the worklists are empty.  */
676  while (!cfg_blocks_empty_p ()
677	 || VEC_length (tree, interesting_ssa_edges) > 0
678	 || VEC_length (tree, varying_ssa_edges) > 0)
679    {
680      if (!cfg_blocks_empty_p ())
681	{
682	  /* Pull the next block to simulate off the worklist.  */
683	  basic_block dest_block = cfg_blocks_get ();
684	  simulate_block (dest_block);
685	}
686
      /* In order to move things to varying as quickly as possible,
	 process the VARYING_SSA_EDGES worklist first.  */
689      process_ssa_edge_worklist (&varying_ssa_edges);
690
691      /* Now process the INTERESTING_SSA_EDGES worklist.  */
692      process_ssa_edge_worklist (&interesting_ssa_edges);
693    }
694
695  ssa_prop_fini ();
696}
697
698
699/* Return the first V_MAY_DEF or V_MUST_DEF operand for STMT.  */
700
701tree
702first_vdef (tree stmt)
703{
704  ssa_op_iter iter;
705  tree op;
706
707  /* Simply return the first operand we arrive at.  */
708  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
709    return (op);
710
711  gcc_unreachable ();
712}
713
714
715/* Return true if STMT is of the form 'LHS = mem_ref', where 'mem_ref'
716   is a non-volatile pointer dereference, a structure reference or a
717   reference to a single _DECL.  Ignore volatile memory references
718   because they are not interesting for the optimizers.  */
719
720bool
721stmt_makes_single_load (tree stmt)
722{
723  tree rhs;
724
725  if (TREE_CODE (stmt) != MODIFY_EXPR)
726    return false;
727
728  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VUSE))
729    return false;
730
731  rhs = TREE_OPERAND (stmt, 1);
732  STRIP_NOPS (rhs);
733
734  return (!TREE_THIS_VOLATILE (rhs)
735	  && (DECL_P (rhs)
736	      || REFERENCE_CLASS_P (rhs)));
737}
738
739
740/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
741   is a non-volatile pointer dereference, a structure reference or a
742   reference to a single _DECL.  Ignore volatile memory references
743   because they are not interesting for the optimizers.  */
744
745bool
746stmt_makes_single_store (tree stmt)
747{
748  tree lhs;
749
750  if (TREE_CODE (stmt) != MODIFY_EXPR)
751    return false;
752
753  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF))
754    return false;
755
756  lhs = TREE_OPERAND (stmt, 0);
757  STRIP_NOPS (lhs);
758
759  return (!TREE_THIS_VOLATILE (lhs)
760          && (DECL_P (lhs)
761	      || REFERENCE_CLASS_P (lhs)));
762}
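
/* For example (illustration only), the two predicates above classify
   these GIMPLE statements as follows:

	y_4 = a.b;		<- stmt_makes_single_load returns true
	a.b = x_1;		<- stmt_makes_single_store returns true
	z_7 = x_1 + 1;		<- neither; no memory is referenced

   Statements whose memory reference is volatile are rejected by both
   predicates.  */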
763
764
765/* If STMT makes a single memory load and all the virtual use operands
766   have the same value in array VALUES, return it.  Otherwise, return
767   NULL.  */
768
769prop_value_t *
770get_value_loaded_by (tree stmt, prop_value_t *values)
771{
772  ssa_op_iter i;
773  tree vuse;
774  prop_value_t *prev_val = NULL;
775  prop_value_t *val = NULL;
776
777  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
778    {
779      val = &values[SSA_NAME_VERSION (vuse)];
780      if (prev_val && prev_val->value != val->value)
781	return NULL;
782      prev_val = val;
783    }
784
785  return val;
786}
787
788
789/* Propagation statistics.  */
790struct prop_stats_d
791{
792  long num_const_prop;
793  long num_copy_prop;
794  long num_pred_folded;
795};
796
797static struct prop_stats_d prop_stats;
798
799/* Replace USE references in statement STMT with the values stored in
800   PROP_VALUE. Return true if at least one reference was replaced.  If
801   REPLACED_ADDRESSES_P is given, it will be set to true if an address
802   constant was replaced.  */
803
804bool
805replace_uses_in (tree stmt, bool *replaced_addresses_p,
806		 prop_value_t *prop_value)
807{
808  bool replaced = false;
809  use_operand_p use;
810  ssa_op_iter iter;
811
812  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
813    {
814      tree tuse = USE_FROM_PTR (use);
815      tree val = prop_value[SSA_NAME_VERSION (tuse)].value;
816
817      if (val == tuse || val == NULL_TREE)
818	continue;
819
820      if (TREE_CODE (stmt) == ASM_EXPR
821	  && !may_propagate_copy_into_asm (tuse))
822	continue;
823
824      if (!may_propagate_copy (tuse, val))
825	continue;
826
827      if (TREE_CODE (val) != SSA_NAME)
828	prop_stats.num_const_prop++;
829      else
830	prop_stats.num_copy_prop++;
831
832      propagate_value (use, val);
833
834      replaced = true;
835      if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
836	*replaced_addresses_p = true;
837    }
838
839  return replaced;
840}
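
/* For instance (illustration only), if PROP_VALUE records the constant
   5 for x_2, replace_uses_in rewrites

	y_3 = x_2 + 1;

   into

	y_3 = 5 + 1;

   and returns true; the caller (substitute_and_fold) then folds the
   statement, reducing the right-hand side to 6.  */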
841
842
843/* Replace the VUSE references in statement STMT with the values
844   stored in PROP_VALUE.  Return true if a reference was replaced.  If
845   REPLACED_ADDRESSES_P is given, it will be set to true if an address
846   constant was replaced.
847
848   Replacing VUSE operands is slightly more complex than replacing
849   regular USEs.  We are only interested in two types of replacements
850   here:
851
852   1- If the value to be replaced is a constant or an SSA name for a
853      GIMPLE register, then we are making a copy/constant propagation
854      from a memory store.  For instance,
855
856      	# a_3 = V_MAY_DEF <a_2>
857	a.b = x_1;
858	...
859 	# VUSE <a_3>
860	y_4 = a.b;
861
862      This replacement is only possible iff STMT is an assignment
863      whose RHS is identical to the LHS of the statement that created
864      the VUSE(s) that we are replacing.  Otherwise, we may do the
865      wrong replacement:
866
867      	# a_3 = V_MAY_DEF <a_2>
868	# b_5 = V_MAY_DEF <b_4>
869	*p = 10;
870	...
871	# VUSE <b_5>
872	x_8 = b;
873
874      Even though 'b_5' acquires the value '10' during propagation,
875      there is no way for the propagator to tell whether the
876      replacement is correct in every reached use, because values are
877      computed at definition sites.  Therefore, when doing final
878      substitution of propagated values, we have to check each use
879      site.  Since the RHS of STMT ('b') is different from the LHS of
880      the originating statement ('*p'), we cannot replace 'b' with
881      '10'.
882
883      Similarly, when merging values from PHI node arguments,
884      propagators need to take care not to merge the same values
885      stored in different locations:
886
887     		if (...)
888		  # a_3 = V_MAY_DEF <a_2>
889		  a.b = 3;
890		else
891		  # a_4 = V_MAY_DEF <a_2>
892		  a.c = 3;
893		# a_5 = PHI <a_3, a_4>
894
895      It would be wrong to propagate '3' into 'a_5' because that
896      operation merges two stores to different memory locations.
897
898
899   2- If the value to be replaced is an SSA name for a virtual
900      register, then we simply replace each VUSE operand with its
901      value from PROP_VALUE.  This is the same replacement done by
902      replace_uses_in.  */
903
904static bool
905replace_vuses_in (tree stmt, bool *replaced_addresses_p,
906                  prop_value_t *prop_value)
907{
908  bool replaced = false;
909  ssa_op_iter iter;
910  use_operand_p vuse;
911
912  if (stmt_makes_single_load (stmt))
913    {
914      /* If STMT is an assignment whose RHS is a single memory load,
915	 see if we are trying to propagate a constant or a GIMPLE
916	 register (case #1 above).  */
917      prop_value_t *val = get_value_loaded_by (stmt, prop_value);
918      tree rhs = TREE_OPERAND (stmt, 1);
919
920      if (val
921	  && val->value
922	  && (is_gimple_reg (val->value)
923	      || is_gimple_min_invariant (val->value))
	  && simple_cst_equal (rhs, val->mem_ref) == 1)
	{
927	  /* If we are replacing a constant address, inform our
928	     caller.  */
929	  if (TREE_CODE (val->value) != SSA_NAME
930	      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1)))
931	      && replaced_addresses_p)
932	    *replaced_addresses_p = true;
933
934	  /* We can only perform the substitution if the load is done
935	     from the same memory location as the original store.
936	     Since we already know that there are no intervening
937	     stores between DEF_STMT and STMT, we only need to check
938	     that the RHS of STMT is the same as the memory reference
939	     propagated together with the value.  */
940	  TREE_OPERAND (stmt, 1) = val->value;
941
942	  if (TREE_CODE (val->value) != SSA_NAME)
943	    prop_stats.num_const_prop++;
944	  else
945	    prop_stats.num_copy_prop++;
946
947	  /* Since we have replaced the whole RHS of STMT, there
948	     is no point in checking the other VUSEs, as they will
949	     all have the same value.  */
950	  return true;
951	}
952    }
953
954  /* Otherwise, the values for every VUSE operand must be other
955     SSA_NAMEs that can be propagated into STMT.  */
956  FOR_EACH_SSA_USE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
957    {
958      tree var = USE_FROM_PTR (vuse);
959      tree val = prop_value[SSA_NAME_VERSION (var)].value;
960
961      if (val == NULL_TREE || var == val)
962	continue;
963
964      /* Constants and copies propagated between real and virtual
965	 operands are only possible in the cases handled above.  They
966	 should be ignored in any other context.  */
967      if (is_gimple_min_invariant (val) || is_gimple_reg (val))
968	continue;
969
970      propagate_value (vuse, val);
971      prop_stats.num_copy_prop++;
972      replaced = true;
973    }
974
975  return replaced;
976}
977
978
979/* Replace propagated values into all the arguments for PHI using the
980   values from PROP_VALUE.  */
981
982static void
983replace_phi_args_in (tree phi, prop_value_t *prop_value)
984{
985  int i;
986  bool replaced = false;
987  tree prev_phi = NULL;
988
989  if (dump_file && (dump_flags & TDF_DETAILS))
990    prev_phi = unshare_expr (phi);
991
992  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
993    {
994      tree arg = PHI_ARG_DEF (phi, i);
995
996      if (TREE_CODE (arg) == SSA_NAME)
997	{
998	  tree val = prop_value[SSA_NAME_VERSION (arg)].value;
999
1000	  if (val && val != arg && may_propagate_copy (arg, val))
1001	    {
1002	      if (TREE_CODE (val) != SSA_NAME)
1003		prop_stats.num_const_prop++;
1004	      else
1005		prop_stats.num_copy_prop++;
1006
1007	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
1008	      replaced = true;
1009
1010	      /* If we propagated a copy and this argument flows
1011		 through an abnormal edge, update the replacement
1012		 accordingly.  */
1013	      if (TREE_CODE (val) == SSA_NAME
1014		  && PHI_ARG_EDGE (phi, i)->flags & EDGE_ABNORMAL)
1015		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1016	    }
1017	}
1018    }
1019
1020  if (replaced && dump_file && (dump_flags & TDF_DETAILS))
1021    {
1022      fprintf (dump_file, "Folded PHI node: ");
1023      print_generic_stmt (dump_file, prev_phi, TDF_SLIM);
1024      fprintf (dump_file, "           into: ");
1025      print_generic_stmt (dump_file, phi, TDF_SLIM);
1026      fprintf (dump_file, "\n");
1027    }
1028}
1029
1030
1031/* If STMT has a predicate whose value can be computed using the value
1032   range information computed by VRP, compute its value and return true.
1033   Otherwise, return false.  */
1034
1035static bool
1036fold_predicate_in (tree stmt)
1037{
1038  tree *pred_p = NULL;
1039  bool modify_expr_p = false;
1040  tree val;
1041
1042  if (TREE_CODE (stmt) == MODIFY_EXPR
1043      && COMPARISON_CLASS_P (TREE_OPERAND (stmt, 1)))
1044    {
1045      modify_expr_p = true;
1046      pred_p = &TREE_OPERAND (stmt, 1);
1047    }
1048  else if (TREE_CODE (stmt) == COND_EXPR)
1049    pred_p = &COND_EXPR_COND (stmt);
1050  else
1051    return false;
1052
1053  val = vrp_evaluate_conditional (*pred_p, stmt);
1054  if (val)
1055    {
1056      if (modify_expr_p)
1057        val = fold_convert (TREE_TYPE (*pred_p), val);
1058
1059      if (dump_file)
1060	{
1061	  fprintf (dump_file, "Folding predicate ");
1062	  print_generic_expr (dump_file, *pred_p, 0);
1063	  fprintf (dump_file, " to ");
1064	  print_generic_expr (dump_file, val, 0);
1065	  fprintf (dump_file, "\n");
1066	}
1067
1068      prop_stats.num_pred_folded++;
1069      *pred_p = val;
1070      return true;
1071    }
1072
1073  return false;
1074}
1075
1076
1077/* Perform final substitution and folding of propagated values.
1078
1079   PROP_VALUE[I] contains the single value that should be substituted
1080   at every use of SSA name N_I.  If PROP_VALUE is NULL, no values are
1081   substituted.
1082
1083   If USE_RANGES_P is true, statements that contain predicate
1084   expressions are evaluated with a call to vrp_evaluate_conditional.
1085   This will only give meaningful results when called from tree-vrp.c
1086   (the information used by vrp_evaluate_conditional is built by the
1087   VRP pass).  */
1088
1089void
1090substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
1091{
1092  basic_block bb;
1093
1094  if (prop_value == NULL && !use_ranges_p)
1095    return;
1096
1097  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
1099
1100  memset (&prop_stats, 0, sizeof (prop_stats));
1101
1102  /* Substitute values in every statement of every basic block.  */
1103  FOR_EACH_BB (bb)
1104    {
1105      block_stmt_iterator i;
1106      tree phi;
1107
1108      /* Propagate known values into PHI nodes.  */
1109      if (prop_value)
1110	for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1111	  replace_phi_args_in (phi, prop_value);
1112
1113      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1114	{
1115          bool replaced_address, did_replace;
1116	  tree prev_stmt = NULL;
1117	  tree stmt = bsi_stmt (i);
1118
1119	  /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
1120	     range information for names and they are discarded
1121	     afterwards.  */
1122	  if (TREE_CODE (stmt) == MODIFY_EXPR
1123	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
1124	    continue;
1125
1126	  /* Replace the statement with its folded version and mark it
1127	     folded.  */
1128	  did_replace = false;
1129	  replaced_address = false;
1130	  if (dump_file && (dump_flags & TDF_DETAILS))
1131	    prev_stmt = unshare_expr (stmt);
1132
1133	  /* If we have range information, see if we can fold
1134	     predicate expressions.  */
1135	  if (use_ranges_p)
1136	    did_replace = fold_predicate_in (stmt);
1137
1138	  if (prop_value)
1139	    {
1140	      /* Only replace real uses if we couldn't fold the
1141		 statement using value range information (value range
1142		 information is not collected on virtuals, so we only
1143		 need to check this for real uses).  */
1144	      if (!did_replace)
1145		did_replace |= replace_uses_in (stmt, &replaced_address,
1146		                                prop_value);
1147
1148	      did_replace |= replace_vuses_in (stmt, &replaced_address,
1149		                               prop_value);
1150	    }
1151
1152	  /* If we made a replacement, fold and cleanup the statement.  */
1153	  if (did_replace)
1154	    {
1155	      tree old_stmt = stmt;
1156	      tree rhs;
1157
1158	      fold_stmt (bsi_stmt_ptr (i));
1159	      stmt = bsi_stmt (i);
1160
1161	      /* If we folded a builtin function, we'll likely
1162		 need to rename VDEFs.  */
1163	      mark_new_vars_to_rename (stmt);
1164
1165              /* If we cleaned up EH information from the statement,
1166                 remove EH edges.  */
1167	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
1168		tree_purge_dead_eh_edges (bb);
1169
1170	      rhs = get_rhs (stmt);
1171	      if (TREE_CODE (rhs) == ADDR_EXPR)
1172		recompute_tree_invariant_for_addr_expr (rhs);
1173
1174	      if (dump_file && (dump_flags & TDF_DETAILS))
1175		{
1176		  fprintf (dump_file, "Folded statement: ");
1177		  print_generic_stmt (dump_file, prev_stmt, TDF_SLIM);
1178		  fprintf (dump_file, "            into: ");
1179		  print_generic_stmt (dump_file, stmt, TDF_SLIM);
1180		  fprintf (dump_file, "\n");
1181		}
1182	    }
1183
1184	  /* Some statements may be simplified using ranges.  For
1185	     example, division may be replaced by shifts, modulo
	     replaced with bitwise and, etc.  Do this after
	     substituting constants, folding, etc., so that we're
1188	     presented with a fully propagated, canonicalized
1189	     statement.  */
1190	  if (use_ranges_p)
1191	    simplify_stmt_using_ranges (stmt);
1192
1193	}
1194    }
1195
1196  if (dump_file && (dump_flags & TDF_STATS))
1197    {
1198      fprintf (dump_file, "Constants propagated: %6ld\n",
1199	       prop_stats.num_const_prop);
1200      fprintf (dump_file, "Copies propagated:    %6ld\n",
1201	       prop_stats.num_copy_prop);
1202      fprintf (dump_file, "Predicates folded:    %6ld\n",
1203	       prop_stats.num_pred_folded);
1204    }
1205}
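
/* For illustration only, a client pass typically ends with a sequence
   along these lines (EXAMPLE_VALUES is a hypothetical array holding one
   prop_value_t per SSA name version, filled in by the visit callbacks):

	ssa_propagate (example_visit_stmt, example_visit_phi);
	substitute_and_fold (example_values, false);

   Passing true for USE_RANGES_P only makes sense when called from the
   VRP pass, since fold_predicate_in relies on the value range
   information that pass computes.  */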
1206
1207#include "gt-tree-ssa-propagate.h"
1208