1/* SSA Dominator optimizations for trees
2   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
3   Free Software Foundation, Inc.
4   Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to
20the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21Boston, MA 02110-1301, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "tree.h"
28#include "flags.h"
29#include "rtl.h"
30#include "tm_p.h"
31#include "ggc.h"
32#include "basic-block.h"
33#include "cfgloop.h"
34#include "output.h"
35#include "expr.h"
36#include "function.h"
37#include "diagnostic.h"
38#include "timevar.h"
39#include "tree-dump.h"
40#include "tree-flow.h"
41#include "domwalk.h"
42#include "real.h"
43#include "tree-pass.h"
44#include "tree-ssa-propagate.h"
45#include "langhooks.h"
46#include "params.h"
47
48/* This file implements optimizations on the dominator tree.  */
49
50
51/* Structure for recording edge equivalences as well as any pending
52   edge redirections during the dominator optimizer.
53
54   Computing and storing the edge equivalences instead of creating
55   them on-demand can save significant amounts of time, particularly
56   for pathological cases involving switch statements.
57
58   These structures live for a single iteration of the dominator
59   optimizer in the edge's AUX field.  At the end of an iteration we
60   free each of these structures and update the AUX field to point
61   to any requested redirection target (the code for updating the
62   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */
64
struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  The number of recorded conditions can vary, but
     can be determined by the condition's code.  So we have an array
     and its maximum index rather than use a varray.

     Entries are stored as (condition, value) pairs: even slots hold a
     conditional expression and the following odd slot holds the value it
     is known to have, so consumers walk the array in steps of two.  */
  tree *cond_equivalences;
  unsigned int max_cond_equivalences;
};
79
80
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;

/* Stack of available expressions in AVAIL_EXPRS.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (a NULL_TREE).  When we finish processing the block, we pop off entries
   and remove the expressions from the global hash table until we hit the
   marker.  */
static VEC(tree,heap) *avail_exprs_stack;

/* Stack of statements we need to rescan during finalization for newly
   exposed variables.

   Statement rescanning must occur after the current block's available
   expressions are removed from AVAIL_EXPRS.  Else we may change the
   hash code for an expression and be unable to find/remove it from
   AVAIL_EXPRS.  */
static VEC(tree,heap) *stmts_to_rescan;
105
/* Structure for entries in the expression hash table.

   This requires more memory for the hash table entries, but allows us
   to avoid creating silly tree nodes and annotations for conditionals,
   eliminates 2 global hash tables and two block local varrays.

   It also allows us to reduce the number of hash table lookups we
   have to perform in lookup_avail_expr and finally it allows us to
   significantly reduce the number of calls into the hashing routine
   itself.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  tree rhs;

  /* The stmt pointer if this element corresponds to a statement;
     NULL when the element was built from a bare conditional
     expression (see initialize_hash_element).  */
  tree stmt;

  /* The hash value for RHS/ann.  */
  hashval_t hash;
};
131
/* Stack of dest,src pairs that need to be restored during finalization.

   A NULL entry is used to mark the end of pairs which need to be
   restored during finalization of this block.  Pairs are pushed as
   (previous value, destination) so that restore_vars_to_original_value
   can pop the destination first, then the value to restore.  */
static VEC(tree,heap) *const_and_copies_stack;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;
144
/* Statistics for dominator optimizations.  Dumped by
   dump_dominator_optimization_stats when -fdump-...-stats is given.  */
struct opt_stats_d
{
  /* Total number of statements examined.  */
  long num_stmts;
  /* Expressions considered for dominator optimizations.  */
  long num_exprs_considered;
  /* Redundant expressions eliminated.  */
  long num_re;
  /* Constants propagated.  */
  long num_const_prop;
  /* Copies propagated.  */
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;
156
/* A (source, destination) pair, presumably recording that SRC is known
   to have the value DST.  NOTE(review): nothing in this portion of the
   file uses this structure -- confirm against the rest of the file
   before relying on (or removing) it.  */
struct eq_expr_value
{
  tree src;
  tree dst;
};
162
/* Local functions.  Forward declarations only; definitions appear
   later in this file.  */
static void optimize_stmt (struct dom_walk_data *,
			   basic_block bb,
			   block_stmt_iterator);
static tree lookup_avail_expr (tree, bool);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (tree);
static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
static void dom_thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
186
187
188/* Allocate an EDGE_INFO for edge E and attach it to E.
189   Return the new EDGE_INFO structure.  */
190
191static struct edge_info *
192allocate_edge_info (edge e)
193{
194  struct edge_info *edge_info;
195
196  edge_info = XCNEW (struct edge_info);
197
198  e->aux = edge_info;
199  return edge_info;
200}
201
202/* Free all EDGE_INFO structures associated with edges in the CFG.
203   If a particular edge can be threaded, copy the redirection
204   target from the EDGE_INFO structure into the edge's AUX field
205   as required by code to update the CFG and SSA graph for
206   jump threading.  */
207
208static void
209free_all_edge_infos (void)
210{
211  basic_block bb;
212  edge_iterator ei;
213  edge e;
214
215  FOR_EACH_BB (bb)
216    {
217      FOR_EACH_EDGE (e, ei, bb->preds)
218        {
219	 struct edge_info *edge_info = (struct edge_info *) e->aux;
220
221	  if (edge_info)
222	    {
223	      if (edge_info->cond_equivalences)
224		free (edge_info->cond_equivalences);
225	      free (edge_info);
226	      e->aux = NULL;
227	    }
228	}
229    }
230}
231
/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

static unsigned int
tree_ssa_dominator_optimize (void)
{
  struct dom_walk_data walk_data;
  unsigned int i;
  struct loops loops_info;

  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  The table entries are freed by the hash
     table itself (FREE passed as the deletion function).  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
  avail_exprs_stack = VEC_alloc (tree, heap, 20);
  const_and_copies_stack = VEC_alloc (tree, heap, 20);
  stmts_to_rescan = VEC_alloc (tree, heap, 20);
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = NULL;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = 0;
  walk_data.interesting_blocks = NULL;

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  calculate_dominance_info (CDI_DOMINATORS);

  /* We need to know which edges exit loops so that we can
     aggressively thread through loop headers to an exit
     edge.  */
  flow_loops_find (&loops_info);
  mark_loop_exit_edges (&loops_info);
  flow_loops_free (&loops_info);

  /* Clean up the CFG so that any forwarder blocks created by loop
     canonicalization are removed.  */
  cleanup_tree_cfg ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* We need accurate information regarding back edges in the CFG
     for jump threading.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

  /* Flush any deferred operand updates on modified statements before
     renaming new symbols and threading jumps below.  */
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  update_stmt_if_modified (bsi_stmt (bsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks ();

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_zero (need_eh_cleanup);
    }

  /* Dominance information is stale once the CFG has changed.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Finally, remove everything except invariants in SSA_NAME_VALUE.

     Long term we will be able to let everything in SSA_NAME_VALUE
     persist.  However, for now, we know this is the safe thing to do.  */
  for (i = 0; i < num_ssa_names; i++)
   {
      tree name = ssa_name (i);
      tree value;

      if (!name)
        continue;

      value = SSA_NAME_VALUE (name);
      if (value && !is_gimple_min_invariant (value))
	SSA_NAME_VALUE (name) = NULL;
    }

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  /* Delete our main hashtable.  */
  htab_delete (avail_exprs);

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);

  VEC_free (tree, heap, avail_exprs_stack);
  VEC_free (tree, heap, const_and_copies_stack);
  VEC_free (tree, heap, stmts_to_rescan);
  return 0;
}
363
364static bool
365gate_dominator (void)
366{
367  return flag_tree_dom != 0;
368}
369
/* Pass descriptor for the dominator optimizer; wired into the pass
   manager elsewhere.  */
struct tree_opt_pass pass_dominator =
{
  "dom",				/* name */
  gate_dominator,			/* gate */
  tree_ssa_dominator_optimize,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,		/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  PROP_smt_usage,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_ssa
    | TODO_update_smt_usage,		/* todo_flags_finish */
  0					/* letter */
};
390
391
/* Given a stmt CONDSTMT containing a COND_EXPR, canonicalize the
   COND_EXPR into a canonical form.  This may swap the comparison's
   operands (inverting the comparison code accordingly) when
   tree_swap_operands_p deems it profitable.  */

static void
canonicalize_comparison (tree condstmt)
{
  tree cond = COND_EXPR_COND (condstmt);
  tree op0;
  tree op1;
  enum tree_code code = TREE_CODE (cond);

  /* Nothing to do unless the condition is a comparison.  */
  if (!COMPARISON_CLASS_P (cond))
    return;

  /* Capture the operands before any swap; they are compared again
     after the swap below.  */
  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
	 and change the code.  */
      if (code == LT_EXPR
	  || code == GT_EXPR
	  || code == LE_EXPR
	  || code == GE_EXPR)
	{
	  TREE_SET_CODE (cond, swap_tree_comparison (code));
	  swap_tree_operands (condstmt,
			      &TREE_OPERAND (cond, 0),
			      &TREE_OPERAND (cond, 1));
	  /* If one operand was in the operand cache, but the other is
	     not, because it is a constant, this is a case that the
	     internal updating code of swap_tree_operands can't handle
	     properly.  */
	  if (TREE_CODE_CLASS (TREE_CODE (op0))
	      != TREE_CODE_CLASS (TREE_CODE (op1)))
	    update_stmt (condstmt);
	}
    }
}
438
439/* Initialize local stacks for this optimizer and record equivalences
440   upon entry to BB.  Equivalences can come from the edge traversed to
441   reach BB or they may come from PHI nodes at the start of BB.  */
442
443static void
444dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
445			  basic_block bb)
446{
447  if (dump_file && (dump_flags & TDF_DETAILS))
448    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);
449
450  /* Push a marker on the stacks of local information so that we know how
451     far to unwind when we finalize this block.  */
452  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
453  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
454
455  record_equivalences_from_incoming_edge (bb);
456
457  /* PHI nodes can create equivalences too.  */
458  record_equivalences_from_phis (bb);
459}
460
461/* Given an expression EXPR (a relational expression or a statement),
462   initialize the hash table element pointed to by ELEMENT.  */
463
464static void
465initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
466{
467  /* Hash table elements may be based on conditional expressions or statements.
468
469     For the former case, we have no annotation and we want to hash the
470     conditional expression.  In the latter case we have an annotation and
471     we want to record the expression the statement evaluates.  */
472  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
473    {
474      element->stmt = NULL;
475      element->rhs = expr;
476    }
477  else if (TREE_CODE (expr) == COND_EXPR)
478    {
479      element->stmt = expr;
480      element->rhs = COND_EXPR_COND (expr);
481    }
482  else if (TREE_CODE (expr) == SWITCH_EXPR)
483    {
484      element->stmt = expr;
485      element->rhs = SWITCH_COND (expr);
486    }
487  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
488    {
489      element->stmt = expr;
490      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
491    }
492  else if (TREE_CODE (expr) == GOTO_EXPR)
493    {
494      element->stmt = expr;
495      element->rhs = GOTO_DESTINATION (expr);
496    }
497  else
498    {
499      element->stmt = expr;
500      element->rhs = TREE_OPERAND (expr, 1);
501    }
502
503  element->lhs = lhs;
504  element->hash = avail_expr_hash (element);
505}
506
507/* Remove all the expressions in LOCALS from TABLE, stopping when there are
508   LIMIT entries left in LOCALs.  */
509
510static void
511remove_local_expressions_from_table (void)
512{
513  /* Remove all the expressions made available in this block.  */
514  while (VEC_length (tree, avail_exprs_stack) > 0)
515    {
516      struct expr_hash_elt element;
517      tree expr = VEC_pop (tree, avail_exprs_stack);
518
519      if (expr == NULL_TREE)
520	break;
521
522      initialize_hash_element (expr, NULL, &element);
523      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
524    }
525}
526
527/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
528   CONST_AND_COPIES to its original state, stopping when we hit a
529   NULL marker.  */
530
531static void
532restore_vars_to_original_value (void)
533{
534  while (VEC_length (tree, const_and_copies_stack) > 0)
535    {
536      tree prev_value, dest;
537
538      dest = VEC_pop (tree, const_and_copies_stack);
539
540      if (dest == NULL)
541	break;
542
543      prev_value = VEC_pop (tree, const_and_copies_stack);
544      SSA_NAME_VALUE (dest) =  prev_value;
545    }
546}
547
548/* A trivial wrapper so that we can present the generic jump
549   threading code with a simple API for simplifying statements.  */
550static tree
551simplify_stmt_for_jump_threading (tree stmt, tree within_stmt ATTRIBUTE_UNUSED)
552{
553  return lookup_avail_expr (stmt, false);
554}
555
556/* Wrapper for common code to attempt to thread an edge.  For example,
557   it handles lazily building the dummy condition and the bookkeeping
558   when jump threading is successful.  */
559
560static void
561dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
562{
563  /* If we don't already have a dummy condition, build it now.  */
564  if (! walk_data->global_data)
565    {
566      tree dummy_cond = build2 (NE_EXPR, boolean_type_node,
567			        integer_zero_node, integer_zero_node);
568      dummy_cond = build3 (COND_EXPR, void_type_node, dummy_cond, NULL, NULL);
569      walk_data->global_data = dummy_cond;
570    }
571
572  thread_across_edge (walk_data->global_data, e, false,
573		      &const_and_copies_stack,
574		      simplify_stmt_for_jump_threading);
575}
576
/* We have finished processing the dominator children of BB, perform
   any finalization actions in preparation for leaving this node in
   the dominator tree.  This attempts jump threading out of BB, then
   unwinds the equivalence tables to their state on entry to BB and
   rescans any statements queued for this block.  */

static void
dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
{
  tree last;


  /* If we have an outgoing edge to a block with multiple incoming and
     outgoing edges, then we may be able to thread the edge.  ie, we
     may be able to statically determine which of the outgoing edges
     will be traversed when the incoming edge from BB is traversed.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
      && potentially_threadable_block (single_succ (bb)))
    {
      dom_thread_across_edge (walk_data, single_succ_edge (bb));
    }
  else if ((last = last_stmt (bb))
	   && TREE_CODE (last) == COND_EXPR
	   && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
	       || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
	   && EDGE_COUNT (bb->succs) == 2
	   && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
	   && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
    {
      edge true_edge, false_edge;

      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

      /* Only try to thread the edge if it reaches a target block with
	 more than one predecessor and more than one successor.  */
      if (potentially_threadable_block (true_edge->dest))
	{
	  struct edge_info *edge_info;
	  unsigned int i;

	  /* Push a marker onto the available expression stack so that we
	     unwind any expressions related to the TRUE arm before processing
	     the false arm below.  */
	  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
	  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);

	  edge_info = (struct edge_info *) true_edge->aux;

	  /* If we have info associated with this edge, record it into
	     our equivalency tables.  */
	  if (edge_info)
	    {
	      tree *cond_equivalences = edge_info->cond_equivalences;
	      tree lhs = edge_info->lhs;
	      tree rhs = edge_info->rhs;

	      /* If we have a simple NAME = VALUE equivalency record it.  */
	      if (lhs && TREE_CODE (lhs) == SSA_NAME)
		record_const_or_copy (lhs, rhs);

	      /* If we have 0 = COND or 1 = COND equivalences, record them
		 into our expression hash tables.  Entries are stored as
		 (condition, value) pairs, hence the step of 2.  */
	      if (cond_equivalences)
		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		  {
		    tree expr = cond_equivalences[i];
		    tree value = cond_equivalences[i + 1];

		    record_cond (expr, value);
		  }
	    }

	  dom_thread_across_edge (walk_data, true_edge);

	  /* And restore the various tables to their state before
	     we threaded this edge.  */
	  remove_local_expressions_from_table ();
	}

      /* Similarly for the ELSE arm.  */
      if (potentially_threadable_block (false_edge->dest))
	{
	  struct edge_info *edge_info;
	  unsigned int i;

	  /* NOTE(review): unlike the TRUE arm, no avail_exprs marker is
	     pushed here; the unconditional unwind below handles it.  */
	  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
	  edge_info = (struct edge_info *) false_edge->aux;

	  /* If we have info associated with this edge, record it into
	     our equivalency tables.  */
	  if (edge_info)
	    {
	      tree *cond_equivalences = edge_info->cond_equivalences;
	      tree lhs = edge_info->lhs;
	      tree rhs = edge_info->rhs;

	      /* If we have a simple NAME = VALUE equivalency record it.  */
	      if (lhs && TREE_CODE (lhs) == SSA_NAME)
		record_const_or_copy (lhs, rhs);

	      /* If we have 0 = COND or 1 = COND equivalences, record them
		 into our expression hash tables.  */
	      if (cond_equivalences)
		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
		  {
		    tree expr = cond_equivalences[i];
		    tree value = cond_equivalences[i + 1];

		    record_cond (expr, value);
		  }
	    }

	  /* Now thread the edge.  */
	  dom_thread_across_edge (walk_data, false_edge);

	  /* No need to remove local expressions from our tables
	     or restore vars to their original value as that will
	     be done immediately below.  */
	}
    }

  /* Unwind both tables to their state on entry to BB.  */
  remove_local_expressions_from_table ();
  restore_vars_to_original_value ();

  /* If we queued any statements to rescan in this block, then
     go ahead and rescan them now.  Entries for other (ancestor)
     blocks are left on the stack.  */
  while (VEC_length (tree, stmts_to_rescan) > 0)
    {
      tree stmt = VEC_last (tree, stmts_to_rescan);
      basic_block stmt_bb = bb_for_stmt (stmt);

      if (stmt_bb != bb)
	break;

      VEC_pop (tree, stmts_to_rescan);
      mark_new_vars_to_rename (stmt);
    }
}
714
/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree lhs = PHI_RESULT (phi);
      tree rhs = NULL;
      int i;

      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	{
	  tree t = PHI_ARG_DEF (phi, i);

	  /* Ignore alternatives which are the same as our LHS.  Since
	     LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
	     can simply compare pointers.  */
	  if (lhs == t)
	    continue;

	  /* If we have not processed an alternative yet, then set
	     RHS to this alternative.  */
	  if (rhs == NULL)
	    rhs = t;
	  /* If we have processed an alternative (stored in RHS), then
	     see if it is equal to this one.  If it isn't, then stop
	     the search.  */
	  else if (! operand_equal_for_phi_arg_p (rhs, t))
	    break;
	}

      /* If we had no interesting alternatives, then all the RHS alternatives
	 must have been the same as LHS, so the PHI is degenerate and LHS
	 trivially equals itself.  */
      if (!rhs)
	rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
	 breaking out of the loop (i.e. I reached PHI_NUM_ARGS), then we
	 have a PHI which may create a useful equivalence.  We do not need
	 to record unwind data for this, since this is a true assignment
	 and not an equivalence inferred from a comparison.  All uses of
	 this ssa name are dominated by this assignment, so unwinding just
	 costs time and space.  */
      if (i == PHI_NUM_ARGS (phi)
	  && may_propagate_copy (lhs, rhs))
	SSA_NAME_VALUE (lhs) = rhs;
    }
}
768}
769
770/* Ignoring loop backedges, if BB has precisely one incoming edge then
771   return that edge.  Otherwise return NULL.  */
772static edge
773single_incoming_edge_ignoring_loop_edges (basic_block bb)
774{
775  edge retval = NULL;
776  edge e;
777  edge_iterator ei;
778
779  FOR_EACH_EDGE (e, ei, bb->preds)
780    {
781      /* A loop back edge can be identified by the destination of
782	 the edge dominating the source of the edge.  */
783      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
784	continue;
785
786      /* If we have already seen a non-loop edge, then we must have
787	 multiple incoming non-loop edges and thus we return NULL.  */
788      if (retval)
789	return NULL;
790
791      /* This is the first non-loop incoming edge we have found.  Record
792	 it.  */
793      retval = e;
794    }
795
796  return retval;
797}
798
799/* Record any equivalences created by the incoming edge to BB.  If BB
800   has more than one incoming edge, then no equivalence is created.  */
801
802static void
803record_equivalences_from_incoming_edge (basic_block bb)
804{
805  edge e;
806  basic_block parent;
807  struct edge_info *edge_info;
808
809  /* If our parent block ended with a control statement, then we may be
810     able to record some equivalences based on which outgoing edge from
811     the parent was followed.  */
812  parent = get_immediate_dominator (CDI_DOMINATORS, bb);
813
814  e = single_incoming_edge_ignoring_loop_edges (bb);
815
816  /* If we had a single incoming edge from our parent block, then enter
817     any data associated with the edge into our tables.  */
818  if (e && e->src == parent)
819    {
820      unsigned int i;
821
822      edge_info = (struct edge_info *) e->aux;
823
824      if (edge_info)
825	{
826	  tree lhs = edge_info->lhs;
827	  tree rhs = edge_info->rhs;
828	  tree *cond_equivalences = edge_info->cond_equivalences;
829
830	  if (lhs)
831	    record_equality (lhs, rhs);
832
833	  if (cond_equivalences)
834	    {
835	      for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
836		{
837		  tree expr = cond_equivalences[i];
838		  tree value = cond_equivalences[i + 1];
839
840		  record_cond (expr, value);
841		}
842	    }
843	}
844    }
845}
846
847/* Dump SSA statistics on FILE.  */
848
849void
850dump_dominator_optimization_stats (FILE *file)
851{
852  long n_exprs;
853
854  fprintf (file, "Total number of statements:                   %6ld\n\n",
855	   opt_stats.num_stmts);
856  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
857           opt_stats.num_exprs_considered);
858
859  n_exprs = opt_stats.num_exprs_considered;
860  if (n_exprs == 0)
861    n_exprs = 1;
862
863  fprintf (file, "    Redundant expressions eliminated:         %6ld (%.0f%%)\n",
864	   opt_stats.num_re, PERCENT (opt_stats.num_re,
865				      n_exprs));
866  fprintf (file, "    Constants propagated:                     %6ld\n",
867	   opt_stats.num_const_prop);
868  fprintf (file, "    Copies propagated:                        %6ld\n",
869	   opt_stats.num_copy_prop);
870
871  fprintf (file, "\nHash table statistics:\n");
872
873  fprintf (file, "    avail_exprs: ");
874  htab_statistics (file, avail_exprs);
875}
876
877
878/* Dump SSA statistics on stderr.  */
879
880void
881debug_dominator_optimization_stats (void)
882{
883  dump_dominator_optimization_stats (stderr);
884}
885
886
887/* Dump statistics for the hash table HTAB.  */
888
889static void
890htab_statistics (FILE *file, htab_t htab)
891{
892  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
893	   (long) htab_size (htab),
894	   (long) htab_elements (htab),
895	   htab_collisions (htab));
896}
897
898/* Enter a statement into the true/false expression hash table indicating
899   that the condition COND has the value VALUE.  */
900
901static void
902record_cond (tree cond, tree value)
903{
904  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
905  void **slot;
906
907  initialize_hash_element (cond, value, element);
908
909  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
910				   element->hash, INSERT);
911  if (*slot == NULL)
912    {
913      *slot = (void *) element;
914      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
915    }
916  else
917    free (element);
918}
919
920/* Build a new conditional using NEW_CODE, OP0 and OP1 and store
921   the new conditional into *p, then store a boolean_true_node
922   into *(p + 1).  */
923
924static void
925build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
926{
927  *p = build2 (new_code, boolean_type_node, op0, op1);
928  p++;
929  *p = boolean_true_node;
930}
931
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.

   The equivalences are stored in EDGE_INFO->cond_equivalences as
   (condition, value) pairs: slot 2i holds a conditional expression and
   slot 2i + 1 holds the boolean value that condition is known to have
   on this edge.  Slots 0-3 always hold COND/true and INVERTED/false;
   the switch below fills in any additional implied conditions starting
   at slot 4.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;

  /* Only comparisons imply other (dominated) conditions.  */
  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  /* On floats, a < b or a > b additionally implies that the
	     operands are ordered and that a <> b.  */
	  edge_info->max_cond_equivalences = 12;
	  edge_info->cond_equivalences = XNEWVEC (tree, 12);
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				     &edge_info->cond_equivalences[8]);
	  build_and_record_new_cond (LTGT_EXPR, op0, op1,
				     &edge_info->cond_equivalences[10]);
	}
      else
	{
	  edge_info->max_cond_equivalences = 8;
	  edge_info->cond_equivalences = XNEWVEC (tree, 8);
	}

      /* a < b implies a <= b; a > b implies a >= b.  Either implies
	 a != b.  */
      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  /* On floats, a <= b or a >= b implies the operands are
	     ordered.  */
	  edge_info->max_cond_equivalences = 6;
	  edge_info->cond_equivalences = XNEWVEC (tree, 6);
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				     &edge_info->cond_equivalences[4]);
	}
      else
	{
	  /* Nothing extra is implied; only COND and INVERTED.  */
	  edge_info->max_cond_equivalences = 4;
	  edge_info->cond_equivalences = XNEWVEC (tree, 4);
	}
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  /* On floats, a == b implies the operands are ordered.  */
	  edge_info->max_cond_equivalences = 10;
	  edge_info->cond_equivalences = XNEWVEC (tree, 10);
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				     &edge_info->cond_equivalences[8]);
	}
      else
	{
	  edge_info->max_cond_equivalences = 8;
	  edge_info->cond_equivalences = XNEWVEC (tree, 8);
	}
      /* a == b implies both a <= b and a >= b.  */
      build_and_record_new_cond (LE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (GE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case UNORDERED_EXPR:
      /* Unordered operands satisfy every "unordered or X" comparison
	 as well as a != b.  */
      edge_info->max_cond_equivalences = 16;
      edge_info->cond_equivalences = XNEWVEC (tree, 16);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[8]);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
				 &edge_info->cond_equivalences[10]);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[12]);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
				 &edge_info->cond_equivalences[14]);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      /* UNLT implies UNLE; UNGT implies UNGE.  Either implies a != b.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case UNEQ_EXPR:
      /* UNEQ implies both UNLE and UNGE.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    case LTGT_EXPR:
      /* a <> b implies a != b and that the operands are ordered.  */
      edge_info->max_cond_equivalences = 8;
      edge_info->cond_equivalences = XNEWVEC (tree, 8);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences[4]);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences[6]);
      break;

    default:
      /* Other comparison codes imply nothing beyond COND and INVERTED.  */
      edge_info->max_cond_equivalences = 4;
      edge_info->cond_equivalences = XNEWVEC (tree, 4);
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  edge_info->cond_equivalences[0] = cond;
  edge_info->cond_equivalences[1] = boolean_true_node;
  edge_info->cond_equivalences[2] = inverted;
  edge_info->cond_equivalences[3] = boolean_false_node;
}
1069
/* A helper function for record_const_or_copy and record_equality.
   Do the work of recording the value and undo info.

   X is an SSA_NAME whose current value becomes Y.  PREV_X is X's
   previous value; it is pushed so the unwind machinery can restore it
   when we leave the dominator subtree that established this
   equivalence.  */

static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
  SSA_NAME_VALUE (x) = y;

  /* Push the pair <PREV_X, X> onto the unwind stack.  Reserve both
     slots up front so the two quick pushes cannot trigger a
     reallocation in between.  */
  VEC_reserve (tree, heap, const_and_copies_stack, 2);
  VEC_quick_push (tree, const_and_copies_stack, prev_x);
  VEC_quick_push (tree, const_and_copies_stack, x);
}
1082
1083
1084/* Return the loop depth of the basic block of the defining statement of X.
1085   This number should not be treated as absolutely correct because the loop
1086   information may not be completely up-to-date when dom runs.  However, it
1087   will be relatively correct, and as more passes are taught to keep loop info
1088   up to date, the result will become more and more accurate.  */
1089
1090int
1091loop_depth_of_name (tree x)
1092{
1093  tree defstmt;
1094  basic_block defbb;
1095
1096  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
1097  if (TREE_CODE (x) != SSA_NAME)
1098    return 0;
1099
1100  /* Otherwise return the loop depth of the defining statement's bb.
1101     Note that there may not actually be a bb for this statement, if the
1102     ssa_name is live on entry.  */
1103  defstmt = SSA_NAME_DEF_STMT (x);
1104  defbb = bb_for_stmt (defstmt);
1105  if (!defbb)
1106    return 0;
1107
1108  return defbb->loop_depth;
1109}
1110
1111
1112/* Record that X is equal to Y in const_and_copies.  Record undo
1113   information in the block-local vector.  */
1114
1115static void
1116record_const_or_copy (tree x, tree y)
1117{
1118  tree prev_x = SSA_NAME_VALUE (x);
1119
1120  if (TREE_CODE (y) == SSA_NAME)
1121    {
1122      tree tmp = SSA_NAME_VALUE (y);
1123      if (tmp)
1124	y = tmp;
1125    }
1126
1127  record_const_or_copy_1 (x, y, prev_x);
1128}
1129
/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
   This constrains the cases in which we may treat this as assignment.

   On exit, at most one equivalence X = Y is recorded, where X is an
   SSA_NAME and Y is a canonical value (an invariant, or the value
   defined in the shallowest loop).  */

static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (TREE_INVARIANT (y))
    ;
  else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    /* Swap so the preferred canonical value ends up in Y; PREV_X must
       track whatever is in X afterwards, hence the rotation through the
       temporaries.  */
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && TREE_INVARIANT (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
      && (TREE_CODE (y) != REAL_CST
	  || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  record_const_or_copy_1 (x, y, prev_x);
}
1171
1172/* Returns true when STMT is a simple iv increment.  It detects the
1173   following situation:
1174
1175   i_1 = phi (..., i_2)
1176   i_2 = i_1 +/- ...  */
1177
1178static bool
1179simple_iv_increment_p (tree stmt)
1180{
1181  tree lhs, rhs, preinc, phi;
1182  unsigned i;
1183
1184  if (TREE_CODE (stmt) != MODIFY_EXPR)
1185    return false;
1186
1187  lhs = TREE_OPERAND (stmt, 0);
1188  if (TREE_CODE (lhs) != SSA_NAME)
1189    return false;
1190
1191  rhs = TREE_OPERAND (stmt, 1);
1192
1193  if (TREE_CODE (rhs) != PLUS_EXPR
1194      && TREE_CODE (rhs) != MINUS_EXPR)
1195    return false;
1196
1197  preinc = TREE_OPERAND (rhs, 0);
1198  if (TREE_CODE (preinc) != SSA_NAME)
1199    return false;
1200
1201  phi = SSA_NAME_DEF_STMT (preinc);
1202  if (TREE_CODE (phi) != PHI_NODE)
1203    return false;
1204
1205  for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
1206    if (PHI_ARG_DEF (phi, i) == lhs)
1207      return true;
1208
1209  return false;
1210}
1211
/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
   known value for that SSA_NAME (or NULL if no value is known).

   Propagate values from CONST_AND_COPIES into the PHI nodes of the
   successors of BB.  */

static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      tree phi;
      int indx;

      /* If this is an abnormal edge, then we do not want to copy propagate
	 into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      phi = phi_nodes (e->dest);
      if (! phi)
	continue;

      /* All PHIs in E->dest use the same argument index for edge E.  */
      indx = e->dest_idx;
      for ( ; phi; phi = PHI_CHAIN (phi))
	{
	  tree new;
	  use_operand_p orig_p;
	  tree orig;

	  /* The alternative may be associated with a constant, so verify
	     it is an SSA_NAME before doing anything with it.  */
	  orig_p = PHI_ARG_DEF_PTR (phi, indx);
	  orig = USE_FROM_PTR (orig_p);
	  if (TREE_CODE (orig) != SSA_NAME)
	    continue;

	  /* If we have *ORIG_P in our constant/copy table, then replace
	     ORIG_P with its value in our constant/copy table.  Only
	     SSA_NAMEs and minimal invariants are safe replacements, and
	     may_propagate_copy must also approve the substitution.  */
	  new = SSA_NAME_VALUE (orig);
	  if (new
	      && new != orig
	      && (TREE_CODE (new) == SSA_NAME
		  || is_gimple_min_invariant (new))
	      && may_propagate_copy (orig, new))
	    propagate_value (orig_p, new);
	}
    }
}
1264
/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.

   For a SWITCH_EXPR, each outgoing edge reached by exactly one
   singleton case label gets the equivalence "switch index == case
   value".  For a COND_EXPR, the true and false edges get equivalences
   implied by the condition holding or failing.  */

static void
record_edge_info (basic_block bb)
{
  block_stmt_iterator bsi = bsi_last (bb);
  struct edge_info *edge_info;

  if (! bsi_end_p (bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
	{
	  tree cond = SWITCH_COND (stmt);

	  if (TREE_CODE (cond) == SSA_NAME)
	    {
	      tree labels = SWITCH_LABELS (stmt);
	      int i, n_labels = TREE_VEC_LENGTH (labels);
	      /* INFO maps a destination block index to the unique case
		 label targeting it, or error_mark_node when the block
		 is reached by several labels, by a range label
		 (CASE_HIGH set), or by the default label (no
		 CASE_LOW).  */
	      tree *info = XCNEWVEC (tree, last_basic_block);
	      edge e;
	      edge_iterator ei;

	      for (i = 0; i < n_labels; i++)
		{
		  tree label = TREE_VEC_ELT (labels, i);
		  basic_block target_bb = label_to_block (CASE_LABEL (label));

		  if (CASE_HIGH (label)
		      || !CASE_LOW (label)
		      || info[target_bb->index])
		    info[target_bb->index] = error_mark_node;
		  else
		    info[target_bb->index] = label;
		}

	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  basic_block target_bb = e->dest;
		  tree node = info[target_bb->index];

		  if (node != NULL && node != error_mark_node)
		    {
		      /* Taking E implies COND == CASE_LOW (NODE).  */
		      tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
		      edge_info = allocate_edge_info (e);
		      edge_info->lhs = cond;
		      edge_info->rhs = x;
		    }
		}
	      free (info);
	    }
	}

      /* A COND_EXPR may create equivalences too.  */
      if (stmt && TREE_CODE (stmt) == COND_EXPR)
	{
	  tree cond = COND_EXPR_COND (stmt);
	  edge true_edge;
	  edge false_edge;

	  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	  /* If the conditional is a single variable 'X', record 'X = 1'
	     for the true edge and 'X = 0' on the false edge.  */
	  if (SSA_VAR_P (cond))
	    {
	      struct edge_info *edge_info;

	      edge_info = allocate_edge_info (true_edge);
	      edge_info->lhs = cond;
	      edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));

	      edge_info = allocate_edge_info (false_edge);
	      edge_info->lhs = cond;
	      edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
	    }
	  /* Equality tests may create one or two equivalences.  */
	  else if (COMPARISON_CLASS_P (cond))
	    {
	      tree op0 = TREE_OPERAND (cond, 0);
	      tree op1 = TREE_OPERAND (cond, 1);

	      /* Special case comparing booleans against a constant as we
		 know the value of OP0 on both arms of the branch.  i.e., we
		 can record an equivalence for OP0 rather than COND.  */
	      if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
		  && TREE_CODE (op0) == SSA_NAME
		  && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
		  && is_gimple_min_invariant (op1))
		{
		  if (TREE_CODE (cond) == EQ_EXPR)
		    {
		      /* OP0 == OP1: on the true edge OP0 has OP1's value,
			 on the false edge it has the opposite.  */
		      edge_info = allocate_edge_info (true_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					    ? boolean_false_node
					    : boolean_true_node);

		      edge_info = allocate_edge_info (false_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					    ? boolean_true_node
					    : boolean_false_node);
		    }
		  else
		    {
		      /* OP0 != OP1: the assignments are reversed relative
			 to the EQ_EXPR case above.  */
		      edge_info = allocate_edge_info (true_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					    ? boolean_true_node
					    : boolean_false_node);

		      edge_info = allocate_edge_info (false_edge);
		      edge_info->lhs = op0;
		      edge_info->rhs = (integer_zerop (op1)
					    ? boolean_false_node
					    : boolean_true_node);
		    }
		}

	      /* An invariant compared with a name: record the dominated
		 conditions on both edges and, for EQ/NE, an equivalence
		 for the name on the edge where equality holds.  */
	      else if (is_gimple_min_invariant (op0)
		       && (TREE_CODE (op1) == SSA_NAME
			   || is_gimple_min_invariant (op1)))
		{
		  tree inverted = invert_truthvalue (cond);
		  struct edge_info *edge_info;

		  edge_info = allocate_edge_info (true_edge);
		  record_conditions (edge_info, cond, inverted);

		  if (TREE_CODE (cond) == EQ_EXPR)
		    {
		      edge_info->lhs = op1;
		      edge_info->rhs = op0;
		    }

		  edge_info = allocate_edge_info (false_edge);
		  record_conditions (edge_info, inverted, cond);

		  if (TREE_CODE (cond) == NE_EXPR)
		    {
		      edge_info->lhs = op1;
		      edge_info->rhs = op0;
		    }
		}

	      /* Mirror image of the previous case: the name is on the
		 left-hand side.  */
	      else if (TREE_CODE (op0) == SSA_NAME
		       && (is_gimple_min_invariant (op1)
			   || TREE_CODE (op1) == SSA_NAME))
		{
		  tree inverted = invert_truthvalue (cond);
		  struct edge_info *edge_info;

		  edge_info = allocate_edge_info (true_edge);
		  record_conditions (edge_info, cond, inverted);

		  if (TREE_CODE (cond) == EQ_EXPR)
		    {
		      edge_info->lhs = op0;
		      edge_info->rhs = op1;
		    }

		  edge_info = allocate_edge_info (false_edge);
		  record_conditions (edge_info, inverted, cond);

		  if (TREE_CODE (cond) == NE_EXPR)
		    {
		      edge_info->lhs = op0;
		      edge_info->rhs = op1;
		    }
		}
	    }

	  /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
	}
    }
}
1444
/* Propagate information from BB to its outgoing edges.

   This can include equivalency information implied by control statements
   at the end of BB and const/copy propagation into PHIs in BB's
   successor blocks.

   This is the after-dominator-children callback of the dom walker.  */

static void
propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
			     basic_block bb)
{
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
1458
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.

   Returns true if the replacement may have exposed new symbols that
   require the statement to be rescanned for operands (an ADDR_EXPR or
   a pointer-typed invariant was substituted).  */

static bool
eliminate_redundant_computations (tree stmt)
{
  tree *expr_p, def = NULL_TREE;
  bool insert = true;
  tree cached_lhs;
  bool retval = false;
  bool modify_expr_p = false;

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    def = TREE_OPERAND (stmt, 0);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables.  */
  if (! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
      /* Do not record equivalences for increments of ivs.  This would create
	 overlapping live ranges for a very questionable gain.  */
      || simple_iv_increment_p (stmt))
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, insert);

  opt_stats.num_exprs_considered++;

  /* Get a pointer to the expression we are trying to optimize.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    expr_p = &COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == SWITCH_EXPR)
    expr_p = &SWITCH_COND (stmt);
  else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
    {
      /* A RETURN_EXPR wrapping a MODIFY_EXPR; optimize the RHS of the
	 inner assignment.  */
      expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
      modify_expr_p = true;
    }
  else
    {
      expr_p = &TREE_OPERAND (stmt, 1);
      modify_expr_p = true;
    }

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into *EXPR_P.  */
  if (cached_lhs
      && ((TREE_CODE (cached_lhs) != SSA_NAME
	   && (modify_expr_p
	       || tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
						      TREE_TYPE (cached_lhs))))
	  || may_propagate_copy (*expr_p, cached_lhs)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Replaced redundant expr '");
	  print_generic_expr (dump_file, *expr_p, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, cached_lhs, dump_flags);
	   fprintf (dump_file, "'\n");
	}

      opt_stats.num_re++;

#if defined ENABLE_CHECKING
      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
		  || is_gimple_min_invariant (cached_lhs));
#endif

      /* The substitution may expose a new symbol for SSA renaming when
	 the replacement is an address or a pointer-typed invariant.  */
      if (TREE_CODE (cached_lhs) == ADDR_EXPR
	  || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
	      && is_gimple_min_invariant (cached_lhs)))
	retval = true;

      /* Insert an explicit conversion when the cached value's type does
	 not match the expression being replaced.  */
      if (modify_expr_p
	  && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
						  TREE_TYPE (cached_lhs)))
	cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);

      propagate_tree_value (expr_p, cached_lhs);
      mark_stmt_modified (stmt);
    }
  return retval;
}
1552
/* STMT, a MODIFY_EXPR, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.

   MAY_OPTIMIZE_P indicates the caller considered STMT a candidate for
   redundancy elimination; ANN is STMT's statement annotation.  */

static void
record_equivalences_from_stmt (tree stmt,
			       int may_optimize_p,
			       stmt_ann_t ann)
{
  tree lhs = TREE_OPERAND (stmt, 0);
  enum tree_code lhs_code = TREE_CODE (lhs);

  if (lhs_code == SSA_NAME)
    {
      tree rhs = TREE_OPERAND (stmt, 1);

      /* Strip away any useless type conversions.  */
      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If the RHS of the assignment is a constant or another variable that
	 may be propagated, register it in the CONST_AND_COPIES table.  We
	 do not need to record unwind data for this, since this is a true
	 assignment and not an equivalence inferred from a comparison.  All
	 uses of this ssa name are dominated by this assignment, so unwinding
	 just costs time and space.  */
      if (may_optimize_p
	  && (TREE_CODE (rhs) == SSA_NAME
	      || is_gimple_min_invariant (rhs)))
	SSA_NAME_VALUE (lhs) = rhs;
    }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
  if (!ann->has_volatile_ops
      && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
	  || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = TREE_OPERAND (stmt, 1);
      tree new;

      /* FIXME: If the LHS of the assignment is a bitfield and the RHS
         is a constant, we need to adjust the constant to fit into the
         type of the LHS.  If the LHS is a bitfield and the RHS is not
	 a constant, then we can not record any equivalences for this
	 statement since we would need to represent the widening or
	 narrowing of RHS.  This fixes gcc.c-torture/execute/921016-1.c
	 and should not be necessary if GCC represented bitfields
	 properly.  */
      if (lhs_code == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
	{
	  if (TREE_CONSTANT (rhs))
	    rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
	  else
	    rhs = NULL;

	  /* If the value overflowed, then we can not use this equivalence.  */
	  if (rhs && ! is_gimple_min_invariant (rhs))
	    rhs = NULL;
	}

      if (rhs)
	{
	  /* Build a new statement with the RHS and LHS exchanged.  */
	  new = build2 (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);

	  /* Give the artificial load statement virtual operands mirroring
	     the store's.  (Note: spelling of this API is GCC's own.)  */
	  create_ssa_artficial_load_stmt (new, stmt);

	  /* Finally enter the statement into the available expression
	     table.  */
	  lookup_avail_expr (new, true);
	}
    }
}
1630
/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
   CONST_AND_COPIES.

   Returns true if the replacement may have exposed new symbols for SSA
   renaming (an ADDR_EXPR or pointer-typed invariant was substituted),
   in which case the caller should rescan STMT's operands.  */

static bool
cprop_operand (tree stmt, use_operand_p op_p)
{
  bool may_have_exposed_new_symbols = false;
  tree val;
  tree op = USE_FROM_PTR (op_p);

  /* If the operand has a known constant value or it is known to be a
     copy of some other variable, use the value or copy stored in
     CONST_AND_COPIES.  */
  val = SSA_NAME_VALUE (op);
  if (val && val != op && TREE_CODE (val) != VALUE_HANDLE)
    {
      tree op_type, val_type;

      /* Do not change the base variable in the virtual operand
	 tables.  That would make it impossible to reconstruct
	 the renamed virtual operand if we later modify this
	 statement.  Also only allow the new value to be an SSA_NAME
	 for propagation into virtual operands.  */
      if (!is_gimple_reg (op)
	  && (TREE_CODE (val) != SSA_NAME
	      || is_gimple_reg (val)
	      || get_virtual_var (val) != get_virtual_var (op)))
	return false;

      /* Do not replace hard register operands in asm statements.  */
      if (TREE_CODE (stmt) == ASM_EXPR
	  && !may_propagate_copy_into_asm (op))
	return false;

      /* Get the toplevel type of each operand.  */
      op_type = TREE_TYPE (op);
      val_type = TREE_TYPE (val);

      /* While both types are pointers, get the type of the object
	 pointed to.  */
      while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
	{
	  op_type = TREE_TYPE (op_type);
	  val_type = TREE_TYPE (val_type);
	}

      /* Make sure underlying types match before propagating a constant by
	 converting the constant to the proper type.  Note that convert may
	 return a non-gimple expression, in which case we ignore this
	 propagation opportunity.  */
      if (TREE_CODE (val) != SSA_NAME)
	{
	  if (!lang_hooks.types_compatible_p (op_type, val_type))
	    {
	      val = fold_convert (TREE_TYPE (op), val);
	      if (!is_gimple_min_invariant (val))
		return false;
	    }
	}

      /* Certain operands are not allowed to be copy propagated due
	 to their interaction with exception handling and some GCC
	 extensions.  */
      else if (!may_propagate_copy (op, val))
	return false;

      /* Do not propagate copies if the propagated value is at a deeper loop
	 depth than the propagatee.  Otherwise, this may move loop variant
	 variables outside of their loops and prevent coalescing
	 opportunities.  If the value was loop invariant, it will be hoisted
	 by LICM and exposed for copy propagation.  */
      if (loop_depth_of_name (val) > loop_depth_of_name (op))
	return false;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, op, dump_flags);
	  fprintf (dump_file, "' with %s '",
		   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
	  print_generic_expr (dump_file, val, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      /* If VAL is an ADDR_EXPR or a constant of pointer type, note
	 that we may have exposed a new symbol for SSA renaming.  */
      if (TREE_CODE (val) == ADDR_EXPR
	  || (POINTER_TYPE_P (TREE_TYPE (op))
	      && is_gimple_min_invariant (val)))
	may_have_exposed_new_symbols = true;

      /* Bump the appropriate statistics counter.  */
      if (TREE_CODE (val) != SSA_NAME)
	opt_stats.num_const_prop++;
      else
	opt_stats.num_copy_prop++;

      propagate_value (op_p, val);

      /* And note that we modified this statement.  This is now
	 safe, even if we changed virtual operands since we will
	 rescan the statement and rewrite its operands again.  */
      mark_stmt_modified (stmt);
    }
  return may_have_exposed_new_symbols;
}
1737
1738/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
1739   known value for that SSA_NAME (or NULL if no value is known).
1740
1741   Propagate values from CONST_AND_COPIES into the uses, vuses and
1742   v_may_def_ops of STMT.  */
1743
1744static bool
1745cprop_into_stmt (tree stmt)
1746{
1747  bool may_have_exposed_new_symbols = false;
1748  use_operand_p op_p;
1749  ssa_op_iter iter;
1750
1751  FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
1752    {
1753      if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
1754	may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
1755    }
1756
1757  return may_have_exposed_new_symbols;
1758}
1759
1760
1761/* Optimize the statement pointed to by iterator SI.
1762
1763   We try to perform some simplistic global redundancy elimination and
1764   constant propagation:
1765
1766   1- To detect global redundancy, we keep track of expressions that have
1767      been computed in this block and its dominators.  If we find that the
1768      same expression is computed more than once, we eliminate repeated
1769      computations by using the target of the first one.
1770
1771   2- Constant values and copy assignments.  This is used to do very
1772      simplistic constant and copy propagation.  When a constant or copy
1773      assignment is found, we map the value on the RHS of the assignment to
1774      the variable in the LHS in the CONST_AND_COPIES table.  */
1775
1776static void
1777optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
1778	       basic_block bb, block_stmt_iterator si)
1779{
1780  stmt_ann_t ann;
1781  tree stmt, old_stmt;
1782  bool may_optimize_p;
1783  bool may_have_exposed_new_symbols = false;
1784
1785  old_stmt = stmt = bsi_stmt (si);
1786
1787  if (TREE_CODE (stmt) == COND_EXPR)
1788    canonicalize_comparison (stmt);
1789
1790  update_stmt_if_modified (stmt);
1791  ann = stmt_ann (stmt);
1792  opt_stats.num_stmts++;
1793  may_have_exposed_new_symbols = false;
1794
1795  if (dump_file && (dump_flags & TDF_DETAILS))
1796    {
1797      fprintf (dump_file, "Optimizing statement ");
1798      print_generic_stmt (dump_file, stmt, TDF_SLIM);
1799    }
1800
1801  /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs.  */
1802  may_have_exposed_new_symbols = cprop_into_stmt (stmt);
1803
1804  /* If the statement has been modified with constant replacements,
1805     fold its RHS before checking for redundant computations.  */
1806  if (ann->modified)
1807    {
1808      tree rhs;
1809
1810      /* Try to fold the statement making sure that STMT is kept
1811	 up to date.  */
1812      if (fold_stmt (bsi_stmt_ptr (si)))
1813	{
1814	  stmt = bsi_stmt (si);
1815	  ann = stmt_ann (stmt);
1816
1817	  if (dump_file && (dump_flags & TDF_DETAILS))
1818	    {
1819	      fprintf (dump_file, "  Folded to: ");
1820	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
1821	    }
1822	}
1823
1824      rhs = get_rhs (stmt);
1825      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
1826	recompute_tree_invariant_for_addr_expr (rhs);
1827
1828      /* Constant/copy propagation above may change the set of
1829	 virtual operands associated with this statement.  Folding
1830	 may remove the need for some virtual operands.
1831
1832	 Indicate we will need to rescan and rewrite the statement.  */
1833      may_have_exposed_new_symbols = true;
1834    }
1835
1836  /* Check for redundant computations.  Do this optimization only
1837     for assignments that have no volatile ops and conditionals.  */
1838  may_optimize_p = (!ann->has_volatile_ops
1839		    && ((TREE_CODE (stmt) == RETURN_EXPR
1840			 && TREE_OPERAND (stmt, 0)
1841			 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
1842			 && ! (TREE_SIDE_EFFECTS
1843			       (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
1844			|| (TREE_CODE (stmt) == MODIFY_EXPR
1845			    && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
1846			|| TREE_CODE (stmt) == COND_EXPR
1847			|| TREE_CODE (stmt) == SWITCH_EXPR));
1848
1849  if (may_optimize_p)
1850    may_have_exposed_new_symbols |= eliminate_redundant_computations (stmt);
1851
1852  /* Record any additional equivalences created by this statement.  */
1853  if (TREE_CODE (stmt) == MODIFY_EXPR)
1854    record_equivalences_from_stmt (stmt,
1855				   may_optimize_p,
1856				   ann);
1857
1858  /* If STMT is a COND_EXPR and it was modified, then we may know
1859     where it goes.  If that is the case, then mark the CFG as altered.
1860
1861     This will cause us to later call remove_unreachable_blocks and
1862     cleanup_tree_cfg when it is safe to do so.  It is not safe to
1863     clean things up here since removal of edges and such can trigger
1864     the removal of PHI nodes, which in turn can release SSA_NAMEs to
1865     the manager.
1866
1867     That's all fine and good, except that once SSA_NAMEs are released
1868     to the manager, we must not call create_ssa_name until all references
1869     to released SSA_NAMEs have been eliminated.
1870
1871     All references to the deleted SSA_NAMEs can not be eliminated until
1872     we remove unreachable blocks.
1873
1874     We can not remove unreachable blocks until after we have completed
1875     any queued jump threading.
1876
1877     We can not complete any queued jump threads until we have taken
1878     appropriate variables out of SSA form.  Taking variables out of
1879     SSA form can call create_ssa_name and thus we lose.
1880
1881     Ultimately I suspect we're going to need to change the interface
1882     into the SSA_NAME manager.  */
1883
1884  if (ann->modified)
1885    {
1886      tree val = NULL;
1887
1888      if (TREE_CODE (stmt) == COND_EXPR)
1889	val = COND_EXPR_COND (stmt);
1890      else if (TREE_CODE (stmt) == SWITCH_EXPR)
1891	val = SWITCH_COND (stmt);
1892
1893      if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
1894	cfg_altered = true;
1895
1896      /* If we simplified a statement in such a way as to be shown that it
1897	 cannot trap, update the eh information and the cfg to match.  */
1898      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
1899	{
1900	  bitmap_set_bit (need_eh_cleanup, bb->index);
1901	  if (dump_file && (dump_flags & TDF_DETAILS))
1902	    fprintf (dump_file, "  Flagged to clear EH edges.\n");
1903	}
1904    }
1905
1906  if (may_have_exposed_new_symbols)
1907    VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
1908}
1909
1910/* Search for an existing instance of STMT in the AVAIL_EXPRS table.  If
1911   found, return its LHS. Otherwise insert STMT in the table and return
1912   NULL_TREE.
1913
1914   Also, when an expression is first inserted in the AVAIL_EXPRS table, it
1915   is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that they
1916   can be removed when we finish processing this block and its children.
1917
1918   NOTE: This function assumes that STMT is a MODIFY_EXPR node that
1919   contains no CALL_EXPR on its RHS and makes no volatile nor
1920   aliased references.  */
1921
1922static tree
1923lookup_avail_expr (tree stmt, bool insert)
1924{
1925  void **slot;
1926  tree lhs;
1927  tree temp;
1928  struct expr_hash_elt *element = XNEW (struct expr_hash_elt);
1929
1930  lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
1931
1932  initialize_hash_element (stmt, lhs, element);
1933
1934  /* Don't bother remembering constant assignments and copy operations.
1935     Constants and copy operations are handled by the constant/copy propagator
1936     in optimize_stmt.  */
1937  if (TREE_CODE (element->rhs) == SSA_NAME
1938      || is_gimple_min_invariant (element->rhs))
1939    {
1940      free (element);
1941      return NULL_TREE;
1942    }
1943
1944  /* Finally try to find the expression in the main expression hash table.  */
1945  slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
1946				   (insert ? INSERT : NO_INSERT));
1947  if (slot == NULL)
1948    {
1949      free (element);
1950      return NULL_TREE;
1951    }
1952
1953  if (*slot == NULL)
1954    {
1955      *slot = (void *) element;
1956      VEC_safe_push (tree, heap, avail_exprs_stack,
1957		     stmt ? stmt : element->rhs);
1958      return NULL_TREE;
1959    }
1960
1961  /* Extract the LHS of the assignment so that it can be used as the current
1962     definition of another variable.  */
1963  lhs = ((struct expr_hash_elt *)*slot)->lhs;
1964
1965  /* See if the LHS appears in the CONST_AND_COPIES table.  If it does, then
1966     use the value from the const_and_copies table.  */
1967  if (TREE_CODE (lhs) == SSA_NAME)
1968    {
1969      temp = SSA_NAME_VALUE (lhs);
1970      if (temp && TREE_CODE (temp) != VALUE_HANDLE)
1971	lhs = temp;
1972    }
1973
1974  free (element);
1975  return lhs;
1976}
1977
1978/* Hashing and equality functions for AVAIL_EXPRS.  The table stores
1979   MODIFY_EXPR statements.  We compute a value number for expressions using
1980   the code of the expression and the SSA numbers of its operands.  */
1981
1982static hashval_t
1983avail_expr_hash (const void *p)
1984{
1985  tree stmt = ((struct expr_hash_elt *)p)->stmt;
1986  tree rhs = ((struct expr_hash_elt *)p)->rhs;
1987  tree vuse;
1988  ssa_op_iter iter;
1989  hashval_t val = 0;
1990
1991  /* iterative_hash_expr knows how to deal with any expression and
1992     deals with commutative operators as well, so just use it instead
1993     of duplicating such complexities here.  */
1994  val = iterative_hash_expr (rhs, val);
1995
1996  /* If the hash table entry is not associated with a statement, then we
1997     can just hash the expression and not worry about virtual operands
1998     and such.  */
1999  if (!stmt || !stmt_ann (stmt))
2000    return val;
2001
2002  /* Add the SSA version numbers of every vuse operand.  This is important
2003     because compound variables like arrays are not renamed in the
2004     operands.  Rather, the rename is done on the virtual variable
2005     representing all the elements of the array.  */
2006  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
2007    val = iterative_hash_expr (vuse, val);
2008
2009  return val;
2010}
2011
2012static hashval_t
2013real_avail_expr_hash (const void *p)
2014{
2015  return ((const struct expr_hash_elt *)p)->hash;
2016}
2017
2018static int
2019avail_expr_eq (const void *p1, const void *p2)
2020{
2021  tree stmt1 = ((struct expr_hash_elt *)p1)->stmt;
2022  tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
2023  tree stmt2 = ((struct expr_hash_elt *)p2)->stmt;
2024  tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
2025
2026  /* If they are the same physical expression, return true.  */
2027  if (rhs1 == rhs2 && stmt1 == stmt2)
2028    return true;
2029
2030  /* If their codes are not equal, then quit now.  */
2031  if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
2032    return false;
2033
2034  /* In case of a collision, both RHS have to be identical and have the
2035     same VUSE operands.  */
2036  if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
2037       || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
2038      && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
2039    {
2040      bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
2041      gcc_assert (!ret || ((struct expr_hash_elt *)p1)->hash
2042		  == ((struct expr_hash_elt *)p2)->hash);
2043      return ret;
2044    }
2045
2046  return false;
2047}
2048
2049/* PHI-ONLY copy and constant propagation.  This pass is meant to clean
2050   up degenerate PHIs created by or exposed by jump threading.  */
2051
2052/* Given PHI, return its RHS if the PHI is a degenerate, otherwise return
2053   NULL.  */
2054
2055static tree
2056degenerate_phi_result (tree phi)
2057{
2058  tree lhs = PHI_RESULT (phi);
2059  tree val = NULL;
2060  int i;
2061
2062  /* Ignoring arguments which are the same as LHS, if all the remaining
2063     arguments are the same, then the PHI is a degenerate and has the
2064     value of that common argument.  */
2065  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
2066    {
2067      tree arg = PHI_ARG_DEF (phi, i);
2068
2069      if (arg == lhs)
2070	continue;
2071      else if (!val)
2072	val = arg;
2073      else if (!operand_equal_p (arg, val, 0))
2074	break;
2075    }
2076  return (i == PHI_NUM_ARGS (phi) ? val : NULL);
2077}
2078
2079/* Given a tree node T, which is either a PHI_NODE or MODIFY_EXPR,
2080   remove it from the IL.  */
2081
2082static void
2083remove_stmt_or_phi (tree t)
2084{
2085  if (TREE_CODE (t) == PHI_NODE)
2086    remove_phi_node (t, NULL);
2087  else
2088    {
2089      block_stmt_iterator bsi = bsi_for_stmt (t);
2090      bsi_remove (&bsi, true);
2091    }
2092}
2093
/* Given a tree node T, which is either a PHI_NODE or MODIFY_EXPR,
   return the "rhs" of the node.  In the case of a non-degenerate
   PHI, NULL is returned.  */
2097
2098static tree
2099get_rhs_or_phi_arg (tree t)
2100{
2101  if (TREE_CODE (t) == PHI_NODE)
2102    return degenerate_phi_result (t);
2103  else if (TREE_CODE (t) == MODIFY_EXPR)
2104    return TREE_OPERAND (t, 1);
2105  gcc_unreachable ();
2106}
2107
2108
2109/* Given a tree node T, which is either a PHI_NODE or a MODIFY_EXPR,
2110   return the "lhs" of the node.  */
2111
2112static tree
2113get_lhs_or_phi_result (tree t)
2114{
2115  if (TREE_CODE (t) == PHI_NODE)
2116    return PHI_RESULT (t);
2117  else if (TREE_CODE (t) == MODIFY_EXPR)
2118    return TREE_OPERAND (t, 0);
2119  gcc_unreachable ();
2120}
2121
2122/* Propagate RHS into all uses of LHS (when possible).
2123
2124   RHS and LHS are derived from STMT, which is passed in solely so
2125   that we can remove it if propagation is successful.
2126
2127   When propagating into a PHI node or into a statement which turns
2128   into a trivial copy or constant initialization, set the
2129   appropriate bit in INTERESTING_NAMEs so that we will visit those
2130   nodes as well in an effort to pick up secondary optimization
2131   opportunities.  */
2132
static void
propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
{
  /* First verify that propagation is valid and isn't going to move a
     loop variant variable outside its loop.  */
  if (! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
      && (TREE_CODE (rhs) != SSA_NAME
	  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
      && may_propagate_copy (lhs, rhs)
      && loop_depth_of_name (lhs) >= loop_depth_of_name (rhs))
    {
      use_operand_p use_p;
      imm_use_iterator iter;
      tree use_stmt;
      /* Remains true only if every use of LHS is successfully
	 replaced; STMT itself is then dead and removed below.  */
      bool all = true;

      /* Dump details.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Replacing '");
	  print_generic_expr (dump_file, lhs, dump_flags);
	  fprintf (dump_file, "' with %s '",
	           (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
		   print_generic_expr (dump_file, rhs, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      /* Walk over every use of LHS and try to replace the use with RHS.
	 At this point the only reason why such a propagation would not
	 be successful would be if the use occurs in an ASM_EXPR.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	{

	  /* It's not always safe to propagate into an ASM_EXPR.  */
	  if (TREE_CODE (use_stmt) == ASM_EXPR
	      && ! may_propagate_copy_into_asm (lhs))
	    {
	      all = false;
	      continue;
	    }

	  /* Dump details.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "    Original statement:");
	      print_generic_expr (dump_file, use_stmt, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate the RHS into this use of the LHS.  */
	  FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	    propagate_value (use_p, rhs);

	  /* Special cases to avoid useless calls into the folding
	     routines, operand scanning, etc.

	     First, propagation into a PHI may cause the PHI to become
	     a degenerate, so mark the PHI as interesting.  No other
	     actions are necessary.

	     Second, if we're propagating a virtual operand and the
	     propagation does not change the underlying _DECL node for
	     the virtual operand, then no further actions are necessary.  */
	  if (TREE_CODE (use_stmt) == PHI_NODE
	      || (! is_gimple_reg (lhs)
		  && TREE_CODE (rhs) == SSA_NAME
		  && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
	    {
	      /* Dump details.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Updated statement:");
		  print_generic_expr (dump_file, use_stmt, dump_flags);
		  fprintf (dump_file, "\n");
		}

	      /* Propagation into a PHI may expose new degenerate PHIs,
		 so mark the result of the PHI as interesting.  */
	      if (TREE_CODE (use_stmt) == PHI_NODE)
		{
		  tree result = get_lhs_or_phi_result (use_stmt);
		  bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
		}
	      continue;
	    }

	  /* From this point onward we are propagating into a
	     real statement.  Folding may (or may not) be possible,
	     we may expose new operands, expose dead EH edges,
	     etc.  */
	  fold_stmt_inplace (use_stmt);

	  /* Sometimes propagation can expose new operands to the
	     renamer.  Note this will call update_stmt at the
	     appropriate time.  */
	  mark_new_vars_to_rename (use_stmt);

	  /* Dump details.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "    Updated statement:");
	      print_generic_expr (dump_file, use_stmt, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* If we replaced a variable index with a constant, then
	     we would need to update the invariant flag for ADDR_EXPRs.  */
	  if (TREE_CODE (use_stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (use_stmt, 1)) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (TREE_OPERAND (use_stmt, 1));

	  /* If we cleaned up EH information from the statement,
	     mark its containing block as needing EH cleanups.  */
	  if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup, bb_for_stmt (use_stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Flagged to clear EH edges.\n");
	    }

	  /* Propagation may expose new trivial copy/constant propagation
	     opportunities.  */
	  if (TREE_CODE (use_stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (use_stmt, 0)) == SSA_NAME
	      && (TREE_CODE (TREE_OPERAND (use_stmt, 1)) == SSA_NAME
		  || is_gimple_min_invariant (TREE_OPERAND (use_stmt, 1))))
	    {
	      tree result = get_lhs_or_phi_result (use_stmt);
	      bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
	    }

	  /* Propagation into these nodes may make certain edges in
	     the CFG unexecutable.  We want to identify them, as PHI
	     nodes at the destinations of those unexecutable edges may
	     become degenerate.  */
	  else if (TREE_CODE (use_stmt) == COND_EXPR
		   || TREE_CODE (use_stmt) == SWITCH_EXPR
		   || TREE_CODE (use_stmt) == GOTO_EXPR)
	    {
	      tree val;

	      if (TREE_CODE (use_stmt) == COND_EXPR)
		val = COND_EXPR_COND (use_stmt);
	      else if (TREE_CODE (use_stmt) == SWITCH_EXPR)
		val = SWITCH_COND (use_stmt);
	      else
		val = GOTO_DESTINATION  (use_stmt);

	      if (is_gimple_min_invariant (val))
		{
		  basic_block bb = bb_for_stmt (use_stmt);
		  /* NOTE(review): TE is dereferenced below without a
		     NULL check; this assumes find_taken_edge always
		     finds an edge for an invariant VAL -- confirm.  */
		  edge te = find_taken_edge (bb, val);
		  edge_iterator ei;
		  edge e;
		  block_stmt_iterator bsi;

		  /* Remove all outgoing edges except TE.  */
		  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
		    {
		      if (e != te)
			{
			  tree phi;

			  /* Mark all the PHI nodes at the destination of
			     the unexecutable edge as interesting.  */
			  for (phi = phi_nodes (e->dest);
			       phi;
			       phi = PHI_CHAIN (phi))
			    {
			      tree result = PHI_RESULT (phi);
			      int version = SSA_NAME_VERSION (result);

			      bitmap_set_bit (interesting_names, version);
			    }

			  /* Fold the removed edge's profile data into
			     the surviving edge TE.  */
			  te->probability += e->probability;

			  te->count += e->count;
			  remove_edge (e);
			  cfg_altered = 1;
			}
		      else
			ei_next (&ei);
		    }

		  /* The control statement is now redundant; remove it.  */
		  bsi = bsi_last (bb_for_stmt (use_stmt));
		  bsi_remove (&bsi, true);

		  /* And fixup the flags on the single remaining edge.  */
		  te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
		  te->flags &= ~EDGE_ABNORMAL;
		  te->flags |= EDGE_FALLTHRU;
		  if (te->probability > REG_BR_PROB_BASE)
		    te->probability = REG_BR_PROB_BASE;
	        }
	    }
	}

      /* Ensure there is nothing else to do. */
      gcc_assert (!all || has_zero_uses (lhs));

      /* If we were able to propagate away all uses of LHS, then
	 we can remove STMT.  */
      if (all)
	remove_stmt_or_phi (stmt);
    }
}
2340
2341/* T is either a PHI node (potentially a degenerate PHI node) or
2342   a statement that is a trivial copy or constant initialization.
2343
2344   Attempt to eliminate T by propagating its RHS into all uses of
2345   its LHS.  This may in turn set new bits in INTERESTING_NAMES
2346   for nodes we want to revisit later.
2347
2348   All exit paths should clear INTERESTING_NAMES for the result
2349   of T.  */
2350
2351static void
2352eliminate_const_or_copy (tree t, bitmap interesting_names)
2353{
2354  tree lhs = get_lhs_or_phi_result (t);
2355  tree rhs;
2356  int version = SSA_NAME_VERSION (lhs);
2357
2358  /* If the LHS of this statement or PHI has no uses, then we can
2359     just eliminate it.  This can occur if, for example, the PHI
2360     was created by block duplication due to threading and its only
2361     use was in the conditional at the end of the block which was
2362     deleted.  */
2363  if (has_zero_uses (lhs))
2364    {
2365      bitmap_clear_bit (interesting_names, version);
2366      remove_stmt_or_phi (t);
2367      return;
2368    }
2369
2370  /* Get the RHS of the assignment or PHI node if the PHI is a
2371     degenerate.  */
2372  rhs = get_rhs_or_phi_arg (t);
2373  if (!rhs)
2374    {
2375      bitmap_clear_bit (interesting_names, version);
2376      return;
2377    }
2378
2379  propagate_rhs_into_lhs (t, lhs, rhs, interesting_names);
2380
2381  /* Note that T may well have been deleted by now, so do
2382     not access it, instead use the saved version # to clear
2383     T's entry in the worklist.  */
2384  bitmap_clear_bit (interesting_names, version);
2385}
2386
2387/* The first phase in degenerate PHI elimination.
2388
2389   Eliminate the degenerate PHIs in BB, then recurse on the
2390   dominator children of BB.  */
2391
2392static void
2393eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
2394{
2395  tree phi, next;
2396  basic_block son;
2397
2398  for (phi = phi_nodes (bb); phi; phi = next)
2399    {
2400      next = PHI_CHAIN (phi);
2401      eliminate_const_or_copy (phi, interesting_names);
2402    }
2403
2404  /* Recurse into the dominator children of BB.  */
2405  for (son = first_dom_son (CDI_DOMINATORS, bb);
2406       son;
2407       son = next_dom_son (CDI_DOMINATORS, son))
2408    eliminate_degenerate_phis_1 (son, interesting_names);
2409}
2410
2411
2412/* A very simple pass to eliminate degenerate PHI nodes from the
2413   IL.  This is meant to be fast enough to be able to be run several
2414   times in the optimization pipeline.
2415
2416   Certain optimizations, particularly those which duplicate blocks
2417   or remove edges from the CFG can create or expose PHIs which are
2418   trivial copies or constant initializations.
2419
2420   While we could pick up these optimizations in DOM or with the
2421   combination of copy-prop and CCP, those solutions are far too
2422   heavy-weight for our needs.
2423
2424   This implementation has two phases so that we can efficiently
2425   eliminate the first order degenerate PHIs and second order
2426   degenerate PHIs.
2427
2428   The first phase performs a dominator walk to identify and eliminate
2429   the vast majority of the degenerate PHIs.  When a degenerate PHI
2430   is identified and eliminated any affected statements or PHIs
2431   are put on a worklist.
2432
2433   The second phase eliminates degenerate PHIs and trivial copies
2434   or constant initializations using the worklist.  This is how we
2435   pick up the secondary optimization opportunities with minimal
2436   cost.  */
2437
2438static unsigned int
2439eliminate_degenerate_phis (void)
2440{
2441  bitmap interesting_names;
2442  bitmap interesting_names1;
2443
2444  /* Bitmap of blocks which need EH information updated.  We can not
2445     update it on-the-fly as doing so invalidates the dominator tree.  */
2446  need_eh_cleanup = BITMAP_ALLOC (NULL);
2447
2448  /* INTERESTING_NAMES is effectively our worklist, indexed by
2449     SSA_NAME_VERSION.
2450
2451     A set bit indicates that the statement or PHI node which
2452     defines the SSA_NAME should be (re)examined to determine if
2453     it has become a degenerate PHI or trivial const/copy propagation
2454     opportunity.
2455
2456     Experiments have show we generally get better compilation
2457     time behavior with bitmaps rather than sbitmaps.  */
2458  interesting_names = BITMAP_ALLOC (NULL);
2459  interesting_names1 = BITMAP_ALLOC (NULL);
2460
2461  /* First phase.  Eliminate degenerate PHIs via a dominator
2462     walk of the CFG.
2463
2464     Experiments have indicated that we generally get better
2465     compile-time behavior by visiting blocks in the first
2466     phase in dominator order.  Presumably this is because walking
2467     in dominator order leaves fewer PHIs for later examination
2468     by the worklist phase.  */
2469  calculate_dominance_info (CDI_DOMINATORS);
2470  eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
2471
2472  /* Second phase.  Eliminate second order degenerate PHIs as well
2473     as trivial copies or constant initializations identified by
2474     the first phase or this phase.  Basically we keep iterating
2475     until our set of INTERESTING_NAMEs is empty.   */
2476  while (!bitmap_empty_p (interesting_names))
2477    {
2478      unsigned int i;
2479      bitmap_iterator bi;
2480
2481      /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
2482	 changed during the loop.  Copy it to another bitmap and
2483	 use that.  */
2484      bitmap_copy (interesting_names1, interesting_names);
2485
2486      EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
2487	{
2488	  tree name = ssa_name (i);
2489
2490	  /* Ignore SSA_NAMEs that have been released because
2491	     their defining statement was deleted (unreachable).  */
2492	  if (name)
2493	    eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
2494				     interesting_names);
2495	}
2496    }
2497
2498  /* Propagation of const and copies may make some EH edges dead.  Purge
2499     such edges from the CFG as needed.  */
2500  if (!bitmap_empty_p (need_eh_cleanup))
2501    {
2502      cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
2503      BITMAP_FREE (need_eh_cleanup);
2504    }
2505
2506  BITMAP_FREE (interesting_names);
2507  BITMAP_FREE (interesting_names1);
2508  if (cfg_altered)
2509    free_dominance_info (CDI_DOMINATORS);
2510  return 0;
2511}
2512
/* Pass descriptor for PHI-only const/copy propagation, implemented by
   eliminate_degenerate_phis above.  Shares its gate with the main
   dominator optimizer so both run under the same conditions.  */
struct tree_opt_pass pass_phi_only_cprop =
{
  "phicprop",                           /* name */
  gate_dominator,                       /* gate */
  eliminate_degenerate_phis,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PHI_CPROP,                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  PROP_smt_usage,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_cleanup_cfg | TODO_dump_func
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts | TODO_update_smt_usage
    | TODO_update_ssa, /* todo_flags_finish */
  0                                     /* letter */
};
2532