tree-ssa-pre.c revision 225736
1/* SSA-PRE for trees.
2   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4   <stevenb@suse.de>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to
20the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21Boston, MA 02110-1301, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "ggc.h"
28#include "tree.h"
29#include "basic-block.h"
30#include "diagnostic.h"
31#include "tree-inline.h"
32#include "tree-flow.h"
33#include "tree-gimple.h"
34#include "tree-dump.h"
35#include "timevar.h"
36#include "fibheap.h"
37#include "hashtab.h"
38#include "tree-iterator.h"
39#include "real.h"
40#include "alloc-pool.h"
41#include "tree-pass.h"
42#include "flags.h"
43#include "bitmap.h"
44#include "langhooks.h"
45#include "cfgloop.h"
46
47/* TODO:
48
49   1. Avail sets can be shared by making an avail_find_leader that
50      walks up the dominator tree and looks in those avail sets.
51      This might affect code optimality; it's unclear right now.
52   2. Strength reduction can be performed by anticipating expressions
53      we can repair later on.
54   3. We can do back-substitution or smarter value numbering to catch
55      commutative expressions split up over multiple statements.
56   4. ANTIC_SAFE_LOADS could be a lot smarter than it is now.
57      Right now, it is simply calculating loads that occur before
58      any store in a block, instead of loads that occur before
59      stores that affect them.  Computing the latter is more expensive,
60      and it's not clear how much more it will buy us.
61*/
62
63/* For ease of terminology, "expression node" below refers to every
64   expression node except MODIFY_EXPR, because MODIFY_EXPRs represent
65   the actual statements containing the expressions we care about, and
66   we cache the value number by putting it in the expression.  */
67
68/* Basic algorithm
69
70   First we walk the statements to generate the AVAIL sets, the
71   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
72   generation of values/expressions by a given block.  We use them
73   when computing the ANTIC sets.  The AVAIL sets consist of
74   SSA_NAME's that represent values, so we know what values are
75   available in what blocks.  AVAIL is a forward dataflow problem.  In
76   SSA, values are never killed, so we don't need a kill set, or a
77   fixpoint iteration, in order to calculate the AVAIL sets.  In
78   traditional parlance, AVAIL sets tell us the availability (up-safety)
79   of the expressions/values.
80
81   Next, we generate the ANTIC sets.  These sets represent the
82   anticipatable expressions.  ANTIC is a backwards dataflow
83   problem.  An expression is anticipatable in a given block if it could
84   be generated in that block.  This means that if we had to perform
85   an insertion in that block, of the value of that expression, we
86   could.  Calculating the ANTIC sets requires phi translation of
87   expressions, because the flow goes backwards through phis.  We must
88   iterate to a fixpoint of the ANTIC sets, because we have a kill
89   set.  Even in SSA form, values are not live over the entire
90   function, only from their definition point onwards.  So we have to
91   remove values from the ANTIC set once we go past the definition
92   point of the leaders that make them up.
93   compute_antic/compute_antic_aux performs this computation.
94
95   Third, we perform insertions to make partially redundant
96   expressions fully redundant.
97
98   An expression is partially redundant (excluding partial
99   anticipation) if:
100
101   1. It is AVAIL in some, but not all, of the predecessors of a
102      given block.
103   2. It is ANTIC in all the predecessors.
104
105   In order to make it fully redundant, we insert the expression into
106   the predecessors where it is not available, but is ANTIC.
107   insert/insert_aux performs this insertion.
108
109   Fourth, we eliminate fully redundant expressions.
110   This is a simple statement walk that replaces redundant
111   calculations with the now available values.  */
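
/* A small worked example (illustrative only, not taken from a real
   dump).  Given a diamond where only one arm computes a + b:

        if (cond)
          x = a + b;
        ...
        y = a + b;

   the second computation is partially redundant: a + b is AVAIL in the
   predecessor of the join block coming from the THEN arm, but not in
   the one coming from the ELSE arm, while being ANTIC in both.
   Insertion adds an evaluation of a + b on the ELSE arm and merges the
   results with a PHI, after which elimination replaces the second
   computation:

        if (cond)
          x = a + b;
        else
          pretmp = a + b;
        t = PHI <x, pretmp>;
        ...
        y = t;

   The names "pretmp" and "t" are purely for illustration.  */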
112
113/* Representations of value numbers:
114
115   Value numbers are represented using the "value handle" approach.
116   This means that each SSA_NAME (and for other reasons to be
117   disclosed in a moment, expression nodes) has a value handle that
118   can be retrieved through get_value_handle.  This value handle, *is*
119   the value number of the SSA_NAME.  You can pointer compare the
120   value handles for equivalence purposes.
121
122   For debugging reasons, the value handle is internally more than
123   just a number, it is a VAR_DECL named "value.x", where x is a
124   unique number for each value number in use.  This allows
125   expressions with SSA_NAMES replaced by value handles to still be
126   pretty printed in a sane way.  They simply print as "value.3 *
127   value.5", etc.
128
129   Expression nodes have value handles associated with them as a
130   cache.  Otherwise, we'd have to look them up again in the hash
131   table.  This makes a significant difference (a factor of two or more)
132   on some test cases.  They can be thrown away after the pass is
133   finished.  */
134
135/* Representation of expressions on value numbers:
136
137   In some portions of this code, you will notice we allocate "fake"
138   analogues to the expression we are value numbering, and replace the
139   operands with the values of the expression.  Since we work on
140   values, and not just names, we canonicalize expressions to value
141   expressions for use in the ANTIC sets, the EXP_GEN set, etc.
142
143   This is theoretically unnecessary; it just saves a bunch of
144   repeated get_value_handle and find_leader calls in the remainder of
145   the code, trading off temporary memory usage for speed.  The tree
146   nodes aren't actually creating more garbage, since they are
147   allocated in special pools which are thrown away at the end of
148   this pass.
149
150   All of this also means that if you print the EXP_GEN or ANTIC sets,
151   you will see "value.5 + value.7" in the set, instead of "a_55 +
152   b_66" or something.  The only thing that actually cares about
153   seeing the value leaders is phi translation, and it needs to be
154   able to find the leader for a value in an arbitrary block, so this
155   "value expression" form is perfect for it (otherwise you'd do
156   get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
157
158
159/* Representation of sets:
160
161   There are currently two types of sets used, hopefully to be unified soon.
162   The AVAIL sets do not need to be sorted in any particular order,
163   and thus, are simply represented as two bitmaps, one that keeps
164   track of values present in the set, and one that keeps track of
165   expressions present in the set.
166
167   The other sets are represented as singly linked lists kept in topological
168   order, with an optional supporting bitmap of values present in the
169   set.  The sets represent values, and the elements can be values or
170   expressions.  The elements can appear in different sets, but each
171   element can only appear once in each set.
172
173   Since each node in the set represents a value, we also want to be
174   able to map (expression, set) pairs to something that tells us
175   whether the value is present in a set.  We use a per-set bitmap for
176   that.  The value handles also point to a linked list of the
177   expressions they represent via a tree annotation.  This is mainly
178   useful for debugging, since we don't do identity lookups.  */
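
/* A minimal usage sketch of the two set flavors; the functions named
   below are defined later in this file, and EXPR, NAME and VAL are
   placeholders (NAME must be an SSA_NAME).  An ordered, indexed value
   set is used like:

        value_set_t antic = set_new (true);
        insert_into_set (antic, expr);
        if (set_contains_value (antic, get_value_handle (expr)))
          ...

   and an unordered bitmap set like:

        bitmap_set_t avail = bitmap_set_new ();
        bitmap_insert_into_set (avail, name);
        if (bitmap_set_contains_value (avail, val))
          ...

   Expressions must already have value handles before being inserted.  */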
179
180
181static bool in_fre = false;
182
183/* A value set element.  Basically a singly linked list of
184   expressions/values.  */
185typedef struct value_set_node
186{
187  /* An expression.  */
188  tree expr;
189
190  /* A pointer to the next element of the value set.  */
191  struct value_set_node *next;
192} *value_set_node_t;
193
194
195/* A value set.  This is a singly linked list of value_set_node
196   elements with a possible bitmap that tells us what values exist in
197   the set.  This set must be kept in topologically sorted order.  */
198typedef struct value_set
199{
200  /* The head of the list.  Used for iterating over the list in
201     order.  */
202  value_set_node_t head;
203
204  /* The tail of the list.  Used for tail insertions, which are
205     necessary to keep the set in topologically sorted order because
206     of how the set is built.  */
207  value_set_node_t tail;
208
209  /* The length of the list.  */
210  size_t length;
211
212  /* True if the set is indexed, which means it contains a backing
213     bitmap for quick determination of whether certain values exist in the
214     set.  */
215  bool indexed;
216
217  /* The bitmap of values that exist in the set.  May be NULL in an
218     empty or non-indexed set.  */
219  bitmap values;
220
221} *value_set_t;
222
223
224/* An unordered bitmap set.  One bitmap tracks values, the other,
225   expressions.  */
226typedef struct bitmap_set
227{
228  bitmap expressions;
229  bitmap values;
230} *bitmap_set_t;
231
232/* Sets that we need to keep track of.  */
233typedef struct bb_value_sets
234{
235  /* The EXP_GEN set, which represents expressions/values generated in
236     a basic block.  */
237  value_set_t exp_gen;
238
239  /* The PHI_GEN set, which represents PHI results generated in a
240     basic block.  */
241  bitmap_set_t phi_gen;
242
243  /* The TMP_GEN set, which represents results/temporaries generated
244     in a basic block. IE the LHS of an expression.  */
245  bitmap_set_t tmp_gen;
246
247  /* The AVAIL_OUT set, which represents which values are available in
248     a given basic block.  */
249  bitmap_set_t avail_out;
250
251  /* The ANTIC_IN set, which represents which values are anticipatable
252     in a given basic block.  */
253  value_set_t antic_in;
254
255  /* The NEW_SETS set, which is used during insertion to augment the
256     AVAIL_OUT set of blocks with the new insertions performed during
257     the current iteration.  */
258  bitmap_set_t new_sets;
259
260  /* The RVUSE sets, which are used during ANTIC computation to ensure
261     that we don't mark loads ANTIC once they have died.  */
262  bitmap rvuse_in;
263  bitmap rvuse_out;
264  bitmap rvuse_gen;
265  bitmap rvuse_kill;
266
267  /* For actually occurring loads, as long as they occur before all the
268     stores in the block, we know they are antic at the top of
269     the block, regardless of RVUSE_KILL.  */
270  value_set_t antic_safe_loads;
271} *bb_value_sets_t;
272
273#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
274#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
275#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
276#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
277#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
278#define RVUSE_IN(BB)    ((bb_value_sets_t) ((BB)->aux))->rvuse_in
279#define RVUSE_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->rvuse_gen
280#define RVUSE_KILL(BB)   ((bb_value_sets_t) ((BB)->aux))->rvuse_kill
281#define RVUSE_OUT(BB)    ((bb_value_sets_t) ((BB)->aux))->rvuse_out
282#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
283#define ANTIC_SAFE_LOADS(BB) ((bb_value_sets_t) ((BB)->aux))->antic_safe_loads
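
/* For example, EXP_GEN (bb) expands to ((bb_value_sets_t) bb->aux)->exp_gen,
   so, assuming BB->aux already points at an initialized bb_value_sets
   structure, typical uses look like:

        value_insert_into_set (EXP_GEN (bb), expr);
        if (bitmap_set_contains_value (AVAIL_OUT (bb), val))
          ...

   EXPR and VAL are placeholders.  */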
284
285/* This structure is used to keep track of statistics on what
286   optimization PRE was able to perform.  */
287static struct
288{
289  /* The number of RHS computations eliminated by PRE.  */
290  int eliminations;
291
292  /* The number of new expressions/temporaries generated by PRE.  */
293  int insertions;
294
295  /* The number of new PHI nodes added by PRE.  */
296  int phis;
297
298  /* The number of values found constant.  */
299  int constified;
300
301} pre_stats;
302
303
304static tree bitmap_find_leader (bitmap_set_t, tree);
305static tree find_leader (value_set_t, tree);
306static void value_insert_into_set (value_set_t, tree);
307static void bitmap_value_insert_into_set (bitmap_set_t, tree);
308static void bitmap_value_replace_in_set (bitmap_set_t, tree);
309static void insert_into_set (value_set_t, tree);
310static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
311static bool bitmap_set_contains_value (bitmap_set_t, tree);
312static bitmap_set_t bitmap_set_new (void);
313static value_set_t set_new  (bool);
314static bool is_undefined_value (tree);
315static tree create_expression_by_pieces (basic_block, tree, tree);
316static tree find_or_generate_expression (basic_block, tree, tree);
317
318
319/* We can add and remove elements and entries to and from sets
320   and hash tables, so we use alloc pools for them.  */
321
322static alloc_pool value_set_pool;
323static alloc_pool bitmap_set_pool;
324static alloc_pool value_set_node_pool;
325static alloc_pool binary_node_pool;
326static alloc_pool unary_node_pool;
327static alloc_pool reference_node_pool;
328static alloc_pool comparison_node_pool;
329static alloc_pool expression_node_pool;
330static alloc_pool list_node_pool;
331static alloc_pool modify_expr_node_pool;
332static bitmap_obstack grand_bitmap_obstack;
333
334/* To avoid adding 300 temporary variables when we only need one, we
335   only create one temporary variable, on demand, and build ssa names
336   off that.  We do have to change the variable if the types don't
337   match the current variable's type.  */
338static tree pretemp;
339static tree storetemp;
340static tree mergephitemp;
341static tree prephitemp;
342
343/* Set of blocks with statements that have had their EH information
344   cleaned up.  */
345static bitmap need_eh_cleanup;
346
347/* The phi_translate_table caches phi translations for a given
348   expression and predecessor.  */
349
350static htab_t phi_translate_table;
351
352/* A tuple {e, pred, vuses, v} used to cache phi translations in the
353   phi_translate_table.  */
354
355typedef struct expr_pred_trans_d
356{
357  /* The expression.  */
358  tree e;
359
360  /* The predecessor block along which we translated the expression.  */
361  basic_block pred;
362
363  /* vuses associated with the expression.  */
364  VEC (tree, gc) *vuses;
365
366  /* The value that resulted from the translation.  */
367  tree v;
368
369
370  /* The hashcode for the {expression, pred} pair.  This is cached for
371     speed reasons.  */
372  hashval_t hashcode;
373} *expr_pred_trans_t;
374
375/* Return the hash value for a phi translation table entry.  */
376
377static hashval_t
378expr_pred_trans_hash (const void *p)
379{
380  const expr_pred_trans_t ve = (expr_pred_trans_t) p;
381  return ve->hashcode;
382}
383
384/* Return true if two phi translation table entries are the same.
385   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */
386
387static int
388expr_pred_trans_eq (const void *p1, const void *p2)
389{
390  const expr_pred_trans_t ve1 = (expr_pred_trans_t) p1;
391  const expr_pred_trans_t ve2 = (expr_pred_trans_t) p2;
392  basic_block b1 = ve1->pred;
393  basic_block b2 = ve2->pred;
394  int i;
395  tree vuse1;
396
397  /* If they are not translations for the same basic block, they can't
398     be equal.  */
399  if (b1 != b2)
400    return false;
401
402
403  /* If they are for the same basic block, determine if the
404     expressions are equal.  */
405  if (!expressions_equal_p (ve1->e, ve2->e))
406    return false;
407
408  /* Make sure the vuses are equivalent.  */
409  if (ve1->vuses == ve2->vuses)
410    return true;
411
412  if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
413    return false;
414
415  for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
416    {
417      if (VEC_index (tree, ve2->vuses, i) != vuse1)
418	return false;
419    }
420
421  return true;
422}
423
424/* Search in the phi translation table for the translation of
425   expression E in basic block PRED with vuses VUSES.
426   Return the translated value, if found, NULL otherwise.  */
427
428static inline tree
429phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
430{
431  void **slot;
432  struct expr_pred_trans_d ept;
433
434  ept.e = e;
435  ept.pred = pred;
436  ept.vuses = vuses;
437  ept.hashcode = vn_compute (e, (unsigned long) pred);
438  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
439				   NO_INSERT);
440  if (!slot)
441    return NULL;
442  else
443    return ((expr_pred_trans_t) *slot)->v;
444}
445
446
447/* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
448   value V, to the phi translation table.  */
449
450static inline void
451phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
452{
453  void **slot;
454  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
455  new_pair->e = e;
456  new_pair->pred = pred;
457  new_pair->vuses = vuses;
458  new_pair->v = v;
459  new_pair->hashcode = vn_compute (e, (unsigned long) pred);
460  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
461				   new_pair->hashcode, INSERT);
462  if (*slot)
463    free (*slot);
464  *slot = (void *) new_pair;
465}
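
/* phi_trans_lookup and phi_trans_add together memoize phi_translate
   (defined below).  The intended pattern, sketched here with
   placeholder variables, is:

        tree cached = phi_trans_lookup (e, pred, vuses);
        if (cached)
          return cached;
        ... compute the translated expression RESULT ...
        phi_trans_add (e, result, pred, vuses);
        return result;

   as phi_translate itself does for each expression class.  */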
466
467
468/* Add expression E to the expression set of value V.  */
469
470void
471add_to_value (tree v, tree e)
472{
473  /* Constants have no expression sets.  */
474  if (is_gimple_min_invariant (v))
475    return;
476
477  if (VALUE_HANDLE_EXPR_SET (v) == NULL)
478    VALUE_HANDLE_EXPR_SET (v) = set_new (false);
479
480  insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
481}
482
483
484/* Return true if value V exists in the bitmap for SET.  */
485
486static inline bool
487value_exists_in_set_bitmap (value_set_t set, tree v)
488{
489  if (!set->values)
490    return false;
491
492  return bitmap_bit_p (set->values, VALUE_HANDLE_ID (v));
493}
494
495
496/* Remove value V from the bitmap for SET.  */
497
498static void
499value_remove_from_set_bitmap (value_set_t set, tree v)
500{
501  gcc_assert (set->indexed);
502
503  if (!set->values)
504    return;
505
506  bitmap_clear_bit (set->values, VALUE_HANDLE_ID (v));
507}
508
509
510/* Insert the value number V into the bitmap of values existing in
511   SET.  */
512
513static inline void
514value_insert_into_set_bitmap (value_set_t set, tree v)
515{
516  gcc_assert (set->indexed);
517
518  if (set->values == NULL)
519    set->values = BITMAP_ALLOC (&grand_bitmap_obstack);
520
521  bitmap_set_bit (set->values, VALUE_HANDLE_ID (v));
522}
523
524
525/* Create a new bitmap set and return it.  */
526
527static bitmap_set_t
528bitmap_set_new (void)
529{
530  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
531  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
532  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
533  return ret;
534}
535
536/* Create a new set.  */
537
538static value_set_t
539set_new  (bool indexed)
540{
541  value_set_t ret;
542  ret = (value_set_t) pool_alloc (value_set_pool);
543  ret->head = ret->tail = NULL;
544  ret->length = 0;
545  ret->indexed = indexed;
546  ret->values = NULL;
547  return ret;
548}
549
550/* Insert an expression EXPR into a bitmapped set.  */
551
552static void
553bitmap_insert_into_set (bitmap_set_t set, tree expr)
554{
555  tree val;
556  /* XXX: For now, we only let SSA_NAMES into the bitmap sets.  */
557  gcc_assert (TREE_CODE (expr) == SSA_NAME);
558  val = get_value_handle (expr);
559
560  gcc_assert (val);
561  if (!is_gimple_min_invariant (val))
562  {
563    bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
564    bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
565  }
566}
567
568/* Insert EXPR into SET.  */
569
570static void
571insert_into_set (value_set_t set, tree expr)
572{
573  value_set_node_t newnode = (value_set_node_t) pool_alloc (value_set_node_pool);
574  tree val = get_value_handle (expr);
575  gcc_assert (val);
576
577  if (is_gimple_min_invariant (val))
578    return;
579
580  /* For indexed sets, insert the value into the set value bitmap.
581     For all sets, add it to the linked list and increment the list
582     length.  */
583  if (set->indexed)
584    value_insert_into_set_bitmap (set, val);
585
586  newnode->next = NULL;
587  newnode->expr = expr;
588  set->length ++;
589  if (set->head == NULL)
590    {
591      set->head = set->tail = newnode;
592    }
593  else
594    {
595      set->tail->next = newnode;
596      set->tail = newnode;
597    }
598}
599
600/* Copy the bitmapped set ORIG into the bitmapped set DEST.  */
601
602static void
603bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
604{
605  bitmap_copy (dest->expressions, orig->expressions);
606  bitmap_copy (dest->values, orig->values);
607}
608
609/* Perform bitmapped set operation DEST &= ORIG.  */
610
611static void
612bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
613{
614  bitmap_iterator bi;
615  unsigned int i;
616  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
617
618  bitmap_and_into (dest->values, orig->values);
619  bitmap_copy (temp, dest->expressions);
620  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
621    {
622      tree name = ssa_name (i);
623      tree val = get_value_handle (name);
624      if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
625	bitmap_clear_bit (dest->expressions, i);
626    }
627  BITMAP_FREE (temp);
628}
629
630/* Perform bitmapped value set operation DEST = DEST & ~ORIG.  */
631
632static void
633bitmap_set_and_compl (bitmap_set_t dest, bitmap_set_t orig)
634{
635  bitmap_iterator bi;
636  unsigned int i;
637  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
638
639  bitmap_and_compl_into (dest->values, orig->values);
640  bitmap_copy (temp, dest->expressions);
641  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
642    {
643      tree name = ssa_name (i);
644      tree val = get_value_handle (name);
645      if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
646	bitmap_clear_bit (dest->expressions, i);
647    }
648  BITMAP_FREE (temp);
649}
650
651/* Return true if the bitmap set SET is empty.  */
652
653static bool
654bitmap_set_empty_p (bitmap_set_t set)
655{
656  return bitmap_empty_p (set->values);
657}
658
659/* Copy the set ORIG to the set DEST.  */
660
661static void
662set_copy (value_set_t dest, value_set_t orig)
663{
664  value_set_node_t node;
665
666  if (!orig || !orig->head)
667    return;
668
669  for (node = orig->head;
670       node;
671       node = node->next)
672    {
673      insert_into_set (dest, node->expr);
674    }
675}
676
677/* Remove EXPR from SET.  */
678
679static void
680set_remove (value_set_t set, tree expr)
681{
682  value_set_node_t node, prev;
683
684  /* Remove the value of EXPR from the bitmap, decrement the set
685     length, and remove it from the actual singly linked list.  */
686  value_remove_from_set_bitmap (set, get_value_handle (expr));
687  set->length--;
688  prev = NULL;
689  for (node = set->head;
690       node != NULL;
691       prev = node, node = node->next)
692    {
693      if (node->expr == expr)
694	{
695	  if (prev == NULL)
696	    set->head = node->next;
697	  else
698	    prev->next= node->next;
699
700	  if (node == set->tail)
701	    set->tail = prev;
702	  pool_free (value_set_node_pool, node);
703	  return;
704	}
705    }
706}
707
708/* Return true if SET contains the value VAL.  */
709
710static bool
711set_contains_value (value_set_t set, tree val)
712{
713  /* All constants are in every set.  */
714  if (is_gimple_min_invariant (val))
715    return true;
716
717  if (!set || set->length == 0)
718    return false;
719
720  return value_exists_in_set_bitmap (set, val);
721}
722
723/* Return true if bitmapped set SET contains the expression EXPR.  */
724static bool
725bitmap_set_contains (bitmap_set_t set, tree expr)
726{
727  /* All constants are in every set.  */
728  if (is_gimple_min_invariant (get_value_handle (expr)))
729    return true;
730
731  /* XXX: Bitmapped sets only contain SSA_NAME's for now.  */
732  if (TREE_CODE (expr) != SSA_NAME)
733    return false;
734  return bitmap_bit_p (set->expressions, SSA_NAME_VERSION (expr));
735}
736
737
738/* Return true if bitmapped set SET contains the value VAL.  */
739
740static bool
741bitmap_set_contains_value (bitmap_set_t set, tree val)
742{
743  if (is_gimple_min_invariant (val))
744    return true;
745  return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
746}
747
748/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */
749
750static void
751bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
752{
753  value_set_t exprset;
754  value_set_node_t node;
755  if (is_gimple_min_invariant (lookfor))
756    return;
757  if (!bitmap_set_contains_value (set, lookfor))
758    return;
759
760  /* The number of expressions having a given value is usually
761     significantly less than the total number of expressions in SET.
762     Thus, rather than check, for each expression in SET, whether it
763     has the value LOOKFOR, we walk the reverse mapping that tells us
764     what expressions have a given value, and see if any of those
765     expressions are in our set.  For large testcases, this is about
766     5-10x faster than walking the bitmap.  If this is somehow a
767     significant loss for some cases, we can choose which set to walk
768     based on the set size.  */
769  exprset = VALUE_HANDLE_EXPR_SET (lookfor);
770  for (node = exprset->head; node; node = node->next)
771    {
772      if (TREE_CODE (node->expr) == SSA_NAME)
773	{
774	  if (bitmap_bit_p (set->expressions, SSA_NAME_VERSION (node->expr)))
775	    {
776	      bitmap_clear_bit (set->expressions, SSA_NAME_VERSION (node->expr));
777	      bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
778	      return;
779	    }
780	}
781    }
782}
783
784/* Subtract bitmapped set B from value set A, and return the new set.  */
785
786static value_set_t
787bitmap_set_subtract_from_value_set (value_set_t a, bitmap_set_t b,
788				    bool indexed)
789{
790  value_set_t ret = set_new (indexed);
791  value_set_node_t node;
792  for (node = a->head;
793       node;
794       node = node->next)
795    {
796      if (!bitmap_set_contains (b, node->expr))
797	insert_into_set (ret, node->expr);
798    }
799  return ret;
800}
801
802/* Return true if two sets are equal.  */
803
804static bool
805set_equal (value_set_t a, value_set_t b)
806{
807  value_set_node_t node;
808
809  if (a->length != b->length)
810    return false;
811  for (node = a->head;
812       node;
813       node = node->next)
814    {
815      if (!set_contains_value (b, get_value_handle (node->expr)))
816	return false;
817    }
818  return true;
819}
820
821/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
822   and add it otherwise.  */
823
824static void
825bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
826{
827  tree val = get_value_handle (expr);
828  if (bitmap_set_contains_value (set, val))
829    bitmap_set_replace_value (set, val, expr);
830  else
831    bitmap_insert_into_set (set, expr);
832}
833
834/* Insert EXPR into SET if EXPR's value is not already present in
835   SET.  */
836
837static void
838bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
839{
840  tree val = get_value_handle (expr);
841
842  if (is_gimple_min_invariant (val))
843    return;
844
845  if (!bitmap_set_contains_value (set, val))
846    bitmap_insert_into_set (set, expr);
847}
848
849/* Insert the value for EXPR into SET, if it doesn't exist already.  */
850
851static void
852value_insert_into_set (value_set_t set, tree expr)
853{
854  tree val = get_value_handle (expr);
855
856  /* Constant and invariant values exist everywhere, and thus,
857     actually keeping them in the sets is pointless.  */
858  if (is_gimple_min_invariant (val))
859    return;
860
861  if (!set_contains_value (set, val))
862    insert_into_set (set, expr);
863}
864
865
866/* Print out the bitmap set SET to OUTFILE.  */
867
868static void
869bitmap_print_value_set (FILE *outfile, bitmap_set_t set,
870			const char *setname, int blockindex)
871{
872  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
873  if (set)
874    {
875      bool first = true;
876      unsigned i;
877      bitmap_iterator bi;
878
879      EXECUTE_IF_SET_IN_BITMAP (set->expressions, 0, i, bi)
880	{
881	  if (!first)
882	    fprintf (outfile, ", ");
883	  first = false;
884	  print_generic_expr (outfile, ssa_name (i), 0);
885
886	  fprintf (outfile, " (");
887	  print_generic_expr (outfile, get_value_handle (ssa_name (i)), 0);
888	  fprintf (outfile, ") ");
889	}
890    }
891  fprintf (outfile, " }\n");
892}
893/* Print out the value_set SET to OUTFILE.  */
894
895static void
896print_value_set (FILE *outfile, value_set_t set,
897		 const char *setname, int blockindex)
898{
899  value_set_node_t node;
900  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
901  if (set)
902    {
903      for (node = set->head;
904	   node;
905	   node = node->next)
906	{
907	  print_generic_expr (outfile, node->expr, 0);
908
909	  fprintf (outfile, " (");
910	  print_generic_expr (outfile, get_value_handle (node->expr), 0);
911	  fprintf (outfile, ") ");
912
913	  if (node->next)
914	    fprintf (outfile, ", ");
915	}
916    }
917
918  fprintf (outfile, " }\n");
919}
920
921/* Print out the expressions that have VAL to OUTFILE.  */
922
923void
924print_value_expressions (FILE *outfile, tree val)
925{
926  if (VALUE_HANDLE_EXPR_SET (val))
927    {
928      char s[32];
929      sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
930      print_value_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
931    }
932}
933
934
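/* Dump the expressions that have value VAL to stderr.  */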
935void
936debug_value_expressions (tree val)
937{
938  print_value_expressions (stderr, val);
939}
940
941
942void debug_value_set (value_set_t, const char *, int);
943
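/* Dump the value set SET, named SETNAME, for block BLOCKINDEX to stderr.  */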
944void
945debug_value_set (value_set_t set, const char *setname, int blockindex)
946{
947  print_value_set (stderr, set, setname, blockindex);
948}
949
950/* Return the folded version of T if T, when folded, is a gimple
951   min_invariant.  Otherwise, return T.  */
952
953static tree
954fully_constant_expression (tree t)
955{
956  tree folded;
957  folded = fold (t);
958  if (folded && is_gimple_min_invariant (folded))
959    return folded;
960  return t;
961}
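
/* For instance, if phi translation rewrites both operands of a
   PLUS_EXPR to the constants 4 and 5, fully_constant_expression folds
   the result to 9, and the constant is used in place of a freshly
   allocated expression node.  The numbers are only an illustration.  */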
962
963/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
964   For example, this can copy a list made of TREE_LIST nodes.
965   Allocates the nodes in list_node_pool.  */
966
967static tree
968pool_copy_list (tree list)
969{
970  tree head;
971  tree prev, next;
972
973  if (list == 0)
974    return 0;
975  head = (tree) pool_alloc (list_node_pool);
976
977  memcpy (head, list, tree_size (list));
978  prev = head;
979
980  next = TREE_CHAIN (list);
981  while (next)
982    {
983      TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
984      memcpy (TREE_CHAIN (prev), next, tree_size (next));
985      prev = TREE_CHAIN (prev);
986      next = TREE_CHAIN (next);
987    }
988  return head;
989}
990
991/* Translate the vuses in the VUSES vector backwards through phi
992   nodes, so that they have the values they would have in BLOCK.  */
993
994static VEC(tree, gc) *
995translate_vuses_through_block (VEC (tree, gc) *vuses, basic_block block)
996{
997  tree oldvuse;
998  VEC(tree, gc) *result = NULL;
999  int i;
1000
1001  for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
1002    {
1003      tree phi = SSA_NAME_DEF_STMT (oldvuse);
1004      if (TREE_CODE (phi) == PHI_NODE)
1005	{
1006	  edge e = find_edge (block, bb_for_stmt (phi));
1007	  if (e)
1008	    {
1009	      tree def = PHI_ARG_DEF (phi, e->dest_idx);
1010	      if (def != oldvuse)
1011		{
1012		  if (!result)
1013		    result = VEC_copy (tree, gc, vuses);
1014		  VEC_replace (tree, result, i, def);
1015		}
1016	    }
1017	}
1018    }
1019  if (result)
1020    {
1021      sort_vuses (result);
1022      return result;
1023    }
1024  return vuses;
1025
1026}
1027/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1028   the phis in PRED.  Return NULL if we can't find a leader for each
1029   part of the translated expression.  */
1030
1031static tree
1032phi_translate (tree expr, value_set_t set, basic_block pred,
1033	       basic_block phiblock)
1034{
1035  tree phitrans = NULL;
1036  tree oldexpr = expr;
1037  if (expr == NULL)
1038    return NULL;
1039
1040  if (is_gimple_min_invariant (expr))
1041    return expr;
1042
1043  /* Phi translations of a given expression don't change.  */
1044  if (EXPR_P (expr))
1045    {
1046      tree vh;
1047
1048      vh = get_value_handle (expr);
1049      if (vh && TREE_CODE (vh) == VALUE_HANDLE)
1050	phitrans = phi_trans_lookup (expr, pred, VALUE_HANDLE_VUSES (vh));
1051      else
1052	phitrans = phi_trans_lookup (expr, pred, NULL);
1053    }
1054  else
1055    phitrans = phi_trans_lookup (expr, pred, NULL);
1056
1057  if (phitrans)
1058    return phitrans;
1059
1060  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1061    {
1062    case tcc_expression:
1063      {
1064	if (TREE_CODE (expr) != CALL_EXPR)
1065	  return NULL;
1066	else
1067	  {
1068	    tree oldop0 = TREE_OPERAND (expr, 0);
1069	    tree oldarglist = TREE_OPERAND (expr, 1);
1070	    tree oldop2 = TREE_OPERAND (expr, 2);
1071	    tree newop0;
1072	    tree newarglist;
1073	    tree newop2 = NULL;
1074	    tree oldwalker;
1075	    tree newwalker;
1076	    tree newexpr;
1077	    tree vh = get_value_handle (expr);
1078	    bool listchanged = false;
1079	    VEC (tree, gc) *vuses = VALUE_HANDLE_VUSES (vh);
1080	    VEC (tree, gc) *tvuses;
1081
1082	    /* Call expressions are kind of weird because they have an
1083	       argument list.  We don't want to value number the list
1084	       as one value number, because that doesn't make much
1085	       sense, and just breaks the support functions we call,
1086	       which expect TREE_OPERAND (call_expr, 1) to be a
1087	       TREE_LIST. */
1088
1089	    newop0 = phi_translate (find_leader (set, oldop0),
1090				    set, pred, phiblock);
1091	    if (newop0 == NULL)
1092	      return NULL;
1093	    if (oldop2)
1094	      {
1095		newop2 = phi_translate (find_leader (set, oldop2),
1096					set, pred, phiblock);
1097		if (newop2 == NULL)
1098		  return NULL;
1099	      }
1100
1101	    /* phi translate the argument list piece by piece.
1102
1103	      We could actually build the list piece by piece here,
1104	      but it's likely to not be worth the memory we will save,
1105	      unless you have millions of call arguments.  */
1106
1107	    newarglist = pool_copy_list (oldarglist);
1108	    for (oldwalker = oldarglist, newwalker = newarglist;
1109		 oldwalker && newwalker;
1110		 oldwalker = TREE_CHAIN (oldwalker),
1111		   newwalker = TREE_CHAIN (newwalker))
1112	      {
1113
1114		tree oldval = TREE_VALUE (oldwalker);
1115		tree newval;
1116		if (oldval)
1117		  {
1118		    /* This may seem like a weird place for this
1119		       check, but it's actually the easiest place to
1120		       do it.  We can't do it lower on in the
1121		       recursion because it's valid for pieces of a
1122		       component ref to be of AGGREGATE_TYPE, as long
1123		       as the outermost one is not.
1124		       To avoid *that* case, we have a check for
1125		       AGGREGATE_TYPE_P in insert_aux.  However, that
1126		       check will *not* catch this case because here
1127		       it occurs in the argument list.  */
1128		    if (AGGREGATE_TYPE_P (TREE_TYPE (oldval)))
1129		      return NULL;
1130		    newval = phi_translate (find_leader (set, oldval),
1131					    set, pred, phiblock);
1132		    if (newval == NULL)
1133		      return NULL;
1134		    if (newval != oldval)
1135		      {
1136			listchanged = true;
1137			TREE_VALUE (newwalker) = get_value_handle (newval);
1138		      }
1139		  }
1140	      }
1141	    if (listchanged)
1142	      vn_lookup_or_add (newarglist, NULL);
1143
1144	    tvuses = translate_vuses_through_block (vuses, pred);
1145
1146	    if (listchanged || (newop0 != oldop0) || (oldop2 != newop2)
1147		|| vuses != tvuses)
1148	      {
1149		newexpr = (tree) pool_alloc (expression_node_pool);
1150		memcpy (newexpr, expr, tree_size (expr));
1151		TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
1152		TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
1153		TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1154		newexpr->common.ann = NULL;
1155		vn_lookup_or_add_with_vuses (newexpr, tvuses);
1156		expr = newexpr;
1157		phi_trans_add (oldexpr, newexpr, pred, tvuses);
1158	      }
1159	  }
1160      }
1161      return expr;
1162
1163    case tcc_declaration:
1164      {
1165	VEC (tree, gc) * oldvuses = NULL;
1166	VEC (tree, gc) * newvuses = NULL;
1167
1168	oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1169	if (oldvuses)
1170	  newvuses = translate_vuses_through_block (oldvuses, pred);
1171
1172	if (oldvuses != newvuses)
1173	  vn_lookup_or_add_with_vuses (expr, newvuses);
1174
1175	phi_trans_add (oldexpr, expr, pred, newvuses);
1176      }
1177      return expr;
1178
1179    case tcc_reference:
1180      {
1181	tree oldop0 = TREE_OPERAND (expr, 0);
1182	tree oldop1 = NULL;
1183	tree newop0;
1184	tree newop1 = NULL;
1185	tree oldop2 = NULL;
1186	tree newop2 = NULL;
1187	tree oldop3 = NULL;
1188	tree newop3 = NULL;
1189	tree newexpr;
1190	VEC (tree, gc) * oldvuses = NULL;
1191	VEC (tree, gc) * newvuses = NULL;
1192
1193	if (TREE_CODE (expr) != INDIRECT_REF
1194	    && TREE_CODE (expr) != COMPONENT_REF
1195	    && TREE_CODE (expr) != ARRAY_REF)
1196	  return NULL;
1197
1198	newop0 = phi_translate (find_leader (set, oldop0),
1199				set, pred, phiblock);
1200	if (newop0 == NULL)
1201	  return NULL;
1202
1203	if (TREE_CODE (expr) == ARRAY_REF)
1204	  {
1205	    oldop1 = TREE_OPERAND (expr, 1);
1206	    newop1 = phi_translate (find_leader (set, oldop1),
1207				    set, pred, phiblock);
1208
1209	    if (newop1 == NULL)
1210	      return NULL;
1211	    oldop2 = TREE_OPERAND (expr, 2);
1212	    if (oldop2)
1213	      {
1214		newop2 = phi_translate (find_leader (set, oldop2),
1215					set, pred, phiblock);
1216
1217		if (newop2 == NULL)
1218		  return NULL;
1219	      }
1220	    oldop3 = TREE_OPERAND (expr, 3);
1221	    if (oldop3)
1222	      {
1223		newop3 = phi_translate (find_leader (set, oldop3),
1224					set, pred, phiblock);
1225
1226		if (newop3 == NULL)
1227		  return NULL;
1228	      }
1229	  }
1230
1231	oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
1232	if (oldvuses)
1233	  newvuses = translate_vuses_through_block (oldvuses, pred);
1234
1235	if (newop0 != oldop0 || newvuses != oldvuses
1236	    || newop1 != oldop1
1237	    || newop2 != oldop2
1238	    || newop3 != oldop3)
1239	  {
1240	    tree t;
1241
1242	    newexpr = pool_alloc (reference_node_pool);
1243	    memcpy (newexpr, expr, tree_size (expr));
1244	    TREE_OPERAND (newexpr, 0) = get_value_handle (newop0);
1245	    if (TREE_CODE (expr) == ARRAY_REF)
1246	      {
1247		TREE_OPERAND (newexpr, 1) = get_value_handle (newop1);
1248		if (newop2)
1249		  TREE_OPERAND (newexpr, 2) = get_value_handle (newop2);
1250		if (newop3)
1251		  TREE_OPERAND (newexpr, 3) = get_value_handle (newop3);
1252	      }
1253
1254	    t = fully_constant_expression (newexpr);
1255
1256	    if (t != newexpr)
1257	      {
1258		pool_free (reference_node_pool, newexpr);
1259		newexpr = t;
1260	      }
1261	    else
1262	      {
1263		newexpr->common.ann = NULL;
1264		vn_lookup_or_add_with_vuses (newexpr, newvuses);
1265	      }
1266	    expr = newexpr;
1267	    phi_trans_add (oldexpr, newexpr, pred, newvuses);
1268	  }
1269      }
1270      return expr;
1271      break;
1272
1273    case tcc_binary:
1274    case tcc_comparison:
1275      {
1276	tree oldop1 = TREE_OPERAND (expr, 0);
1277	tree oldop2 = TREE_OPERAND (expr, 1);
1278	tree newop1;
1279	tree newop2;
1280	tree newexpr;
1281
1282	newop1 = phi_translate (find_leader (set, oldop1),
1283				set, pred, phiblock);
1284	if (newop1 == NULL)
1285	  return NULL;
1286	newop2 = phi_translate (find_leader (set, oldop2),
1287				set, pred, phiblock);
1288	if (newop2 == NULL)
1289	  return NULL;
1290	if (newop1 != oldop1 || newop2 != oldop2)
1291	  {
1292	    tree t;
1293	    newexpr = (tree) pool_alloc (binary_node_pool);
1294	    memcpy (newexpr, expr, tree_size (expr));
1295	    TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
1296	    TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
1297	    t = fully_constant_expression (newexpr);
1298	    if (t != newexpr)
1299	      {
1300		pool_free (binary_node_pool, newexpr);
1301		newexpr = t;
1302	      }
1303	    else
1304	      {
1305		newexpr->common.ann = NULL;
1306		vn_lookup_or_add (newexpr, NULL);
1307	      }
1308	    expr = newexpr;
1309	    phi_trans_add (oldexpr, newexpr, pred, NULL);
1310	  }
1311      }
1312      return expr;
1313
1314    case tcc_unary:
1315      {
1316	tree oldop1 = TREE_OPERAND (expr, 0);
1317	tree newop1;
1318	tree newexpr;
1319
1320	newop1 = phi_translate (find_leader (set, oldop1),
1321				set, pred, phiblock);
1322	if (newop1 == NULL)
1323	  return NULL;
1324	if (newop1 != oldop1)
1325	  {
1326	    tree t;
1327	    newexpr = (tree) pool_alloc (unary_node_pool);
1328	    memcpy (newexpr, expr, tree_size (expr));
1329	    TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
1330	    t = fully_constant_expression (newexpr);
1331	    if (t != newexpr)
1332	      {
1333		pool_free (unary_node_pool, newexpr);
1334		newexpr = t;
1335	      }
1336	    else
1337	      {
1338		newexpr->common.ann = NULL;
1339		vn_lookup_or_add (newexpr, NULL);
1340	      }
1341	    expr = newexpr;
1342	    phi_trans_add (oldexpr, newexpr, pred, NULL);
1343	  }
1344      }
1345      return expr;
1346
1347    case tcc_exceptional:
1348      {
1349	tree phi = NULL;
1350	edge e;
1351	gcc_assert (TREE_CODE (expr) == SSA_NAME);
1352	if (TREE_CODE (SSA_NAME_DEF_STMT (expr)) == PHI_NODE)
1353	  phi = SSA_NAME_DEF_STMT (expr);
1354	else
1355	  return expr;
1356
1357	e = find_edge (pred, bb_for_stmt (phi));
1358	if (e)
1359	  {
1360	    if (is_undefined_value (PHI_ARG_DEF (phi, e->dest_idx)))
1361	      return NULL;
1362	    vn_lookup_or_add (PHI_ARG_DEF (phi, e->dest_idx), NULL);
1363	    return PHI_ARG_DEF (phi, e->dest_idx);
1364	  }
1365      }
1366      return expr;
1367
1368    default:
1369      gcc_unreachable ();
1370    }
1371}
1372
1373/* For each expression in SET, translate the value handles through phi nodes
1374   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1375   expressions in DEST.  */
1376
1377static void
1378phi_translate_set (value_set_t dest, value_set_t set, basic_block pred,
1379		   basic_block phiblock)
1380{
1381  value_set_node_t node;
1382  for (node = set->head;
1383       node;
1384       node = node->next)
1385    {
1386      tree translated;
1387
1388      translated = phi_translate (node->expr, set, pred, phiblock);
1389
1390      /* Don't add constants or empty translations to the cache, since
1391	 we won't look them up that way, or use the result, anyway.  */
1392      if (translated && !is_gimple_min_invariant (translated))
1393	{
1394	  tree vh = get_value_handle (translated);
1395	  VEC (tree, gc) *vuses;
1396
1397	  /* The value handle itself may also be an invariant, in
1398	     which case, it has no vuses.  */
1399	  vuses = !is_gimple_min_invariant (vh)
1400	    ? VALUE_HANDLE_VUSES (vh) : NULL;
1401	  phi_trans_add (node->expr, translated, pred, vuses);
1402	}
1403
1404      if (translated != NULL)
1405	value_insert_into_set (dest, translated);
1406    }
1407}
1408
1409/* Find the leader for a value (i.e., the name representing that
1410   value) in a given set, and return it.  Return NULL if no leader is
1411   found.  */
1412
1413static tree
1414bitmap_find_leader (bitmap_set_t set, tree val)
1415{
1416  if (val == NULL)
1417    return NULL;
1418
1419  if (is_gimple_min_invariant (val))
1420    return val;
1421  if (bitmap_set_contains_value (set, val))
1422    {
1423      /* Rather than walk the entire bitmap of expressions, and see
1424	 whether any of them has the value we are looking for, we look
1425	 at the reverse mapping, which tells us the set of expressions
1426	 that have a given value (IE value->expressions with that
1427	 value) and see if any of those expressions are in our set.
1428	 The number of expressions per value is usually significantly
1429	 less than the number of expressions in the set.  In fact, for
1430	 large testcases, doing it this way is roughly 5-10x faster
1431	 than walking the bitmap.
1432	 If this is somehow a significant loss for some cases, we can
1433	 choose which set to walk based on which set is smaller.  */
1434      value_set_t exprset;
1435      value_set_node_t node;
1436      exprset = VALUE_HANDLE_EXPR_SET (val);
1437      for (node = exprset->head; node; node = node->next)
1438	{
1439	  if (TREE_CODE (node->expr) == SSA_NAME)
1440	    {
1441	      if (bitmap_bit_p (set->expressions,
1442				SSA_NAME_VERSION (node->expr)))
1443		return node->expr;
1444	    }
1445	}
1446    }
1447  return NULL;
1448}
1449
1450
1451/* Find the leader for a value (i.e., the name representing that
1452   value) in a given set, and return it.  Return NULL if no leader is
1453   found.  */
1454
1455static tree
1456find_leader (value_set_t set, tree val)
1457{
1458  value_set_node_t node;
1459
1460  if (val == NULL)
1461    return NULL;
1462
1463  /* Constants represent themselves.  */
1464  if (is_gimple_min_invariant (val))
1465    return val;
1466
1467  if (set->length == 0)
1468    return NULL;
1469
1470  if (value_exists_in_set_bitmap (set, val))
1471    {
1472      for (node = set->head;
1473	   node;
1474	   node = node->next)
1475	{
1476	  if (get_value_handle (node->expr) == val)
1477	    return node->expr;
1478	}
1479    }
1480
1481  return NULL;
1482}
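
/* For example, if VH.5 is the value of both a_2 and b_7 and only b_7
   has been inserted into SET, find_leader (SET, VH.5) returns b_7;
   bitmap_find_leader above behaves the same way for bitmap sets.  The
   names here are purely illustrative.  */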
1483
1484/* Given the vuse representative map, MAP, and an SSA version number,
1485   ID, return the bitmap of names ID represents, or NULL, if none
1486   exists.  */
1487
1488static bitmap
1489get_representative (bitmap *map, int id)
1490{
1491  if (map[id] != NULL)
1492    return map[id];
1493  return NULL;
1494}
1495
1496/* A vuse is anticipatable at the top of block X, from the bottom of the
1497   block, if it reaches the top of the block and is not killed in the
1498   block.  In effect, we are trying to see if the vuse is transparent
1499   backwards in the block.  Return true if any of VUSES dies in BLOCK.  */
1500
1501static bool
1502vuses_dies_in_block_x (VEC (tree, gc) *vuses, basic_block block)
1503{
1504  int i;
1505  tree vuse;
1506
1507  for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
1508    {
1509      /* Any places where this is too conservative, are places
1510	 where we created a new version and shouldn't have.  */
1511
1512      if (!bitmap_bit_p (RVUSE_IN (block), SSA_NAME_VERSION (vuse))
1513	  || bitmap_bit_p (RVUSE_KILL (block), SSA_NAME_VERSION (vuse)))
1514	return true;
1515    }
1516  return false;
1517}
1518
1519/* Determine if the expression EXPR is valid in SET.  This means that
1520   we have a leader for each part of the expression (if it consists of
1521   values), or the expression is an SSA_NAME.
1522
1523   NB: We should never run into a case where we have SSA_NAME +
1524   SSA_NAME or SSA_NAME + value.  The sets valid_in_set is called on,
1525   the ANTIC sets, will only ever have SSA_NAME's or value expressions
1526   (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2)  */
1527
1528static bool
1529valid_in_set (value_set_t set, tree expr, basic_block block)
1530{
1531 tree vh = get_value_handle (expr);
1532 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1533    {
1534    case tcc_binary:
1535    case tcc_comparison:
1536      {
1537	tree op1 = TREE_OPERAND (expr, 0);
1538	tree op2 = TREE_OPERAND (expr, 1);
1539	return set_contains_value (set, op1) && set_contains_value (set, op2);
1540      }
1541
1542    case tcc_unary:
1543      {
1544	tree op1 = TREE_OPERAND (expr, 0);
1545	return set_contains_value (set, op1);
1546      }
1547
1548    case tcc_expression:
1549      {
1550	if (TREE_CODE (expr) == CALL_EXPR)
1551	  {
1552	    tree op0 = TREE_OPERAND (expr, 0);
1553	    tree arglist = TREE_OPERAND (expr, 1);
1554	    tree op2 = TREE_OPERAND (expr, 2);
1555
1556	    /* Check the non-list operands first.  */
1557	    if (!set_contains_value (set, op0)
1558		|| (op2 && !set_contains_value (set, op2)))
1559	      return false;
1560
1561	    /* Now check the operands.  */
1562	    for (; arglist; arglist = TREE_CHAIN (arglist))
1563	      {
1564		if (!set_contains_value (set, TREE_VALUE (arglist)))
1565		  return false;
1566	      }
1567	    return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1568	  }
1569	return false;
1570      }
1571
1572    case tcc_reference:
1573      {
1574	if (TREE_CODE (expr) == INDIRECT_REF
1575	    || TREE_CODE (expr) == COMPONENT_REF
1576            || TREE_CODE (expr) == ARRAY_REF)
1577	  {
1578	    tree op0 = TREE_OPERAND (expr, 0);
1579	    gcc_assert (is_gimple_min_invariant (op0)
1580			|| TREE_CODE (op0) == VALUE_HANDLE);
1581	    if (!set_contains_value (set, op0))
1582	      return false;
1583	    if (TREE_CODE (expr) == ARRAY_REF)
1584	      {
1585		tree op1 = TREE_OPERAND (expr, 1);
1586		tree op2 = TREE_OPERAND (expr, 2);
1587		tree op3 = TREE_OPERAND (expr, 3);
1588		gcc_assert (is_gimple_min_invariant (op1)
1589		            || TREE_CODE (op1) == VALUE_HANDLE);
1590		if (!set_contains_value (set, op1))
1591		  return false;
1592		gcc_assert (!op2 || is_gimple_min_invariant (op2)
1593		            || TREE_CODE (op2) == VALUE_HANDLE);
1594		if (op2
1595		    && !set_contains_value (set, op2))
1596		  return false;
1597		gcc_assert (!op3 || is_gimple_min_invariant (op3)
1598		            || TREE_CODE (op3) == VALUE_HANDLE);
1599		if (op3
1600		    && !set_contains_value (set, op3))
1601		  return false;
1602	    }
1603	  return set_contains_value (ANTIC_SAFE_LOADS (block),
1604				     vh)
1605	    || !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh),
1606				       block);
1607	  }
1608      }
1609      return false;
1610
1611    case tcc_exceptional:
1612      gcc_assert (TREE_CODE (expr) == SSA_NAME);
1613      return true;
1614
1615    case tcc_declaration:
1616      return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1617
1618    default:
1619      /* No other cases should be encountered.  */
1620      gcc_unreachable ();
1621   }
1622}
1623
1624/* Remove from SET the expressions that are no longer valid there,
1625   i.e. expressions made up of values for which we have no leaders
1626   in SET.  */
1627
1628static void
1629clean (value_set_t set, basic_block block)
1630{
1631  value_set_node_t node;
1632  value_set_node_t next;
1633  node = set->head;
1634  while (node)
1635    {
1636      next = node->next;
1637      if (!valid_in_set (set, node->expr, block))
1638	set_remove (set, node->expr);
1639      node = next;
1640    }
1641}
1642
1643static sbitmap has_abnormal_preds;
1644
1645/* Compute the ANTIC set for BLOCK.
1646
1647   If succs(BLOCK) > 1 then
1648     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
1649   else if succs(BLOCK) == 1 then
1650     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
1651
1652   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
1653
1654   XXX: It would be nice to either write a set_clear, and use it for
1655   ANTIC_OUT, or to mark the antic_out set as deleted at the end
1656   of this routine, so that the pool can hand the same memory back out
1657   again for the next ANTIC_OUT.  */
1658
1659static bool
1660compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
1661{
1662  basic_block son;
1663  bool changed = false;
1664  value_set_t S, old, ANTIC_OUT;
1665  value_set_node_t node;
1666
1667  ANTIC_OUT = S = NULL;
1668
1669  /* If any edges from predecessors are abnormal, antic_in is empty,
1670     so do nothing.  */
1671  if (block_has_abnormal_pred_edge)
1672    goto maybe_dump_sets;
1673
1674  old = set_new (false);
1675  set_copy (old, ANTIC_IN (block));
1676  ANTIC_OUT = set_new (true);
1677
1678  /* If the block has no successors, ANTIC_OUT is empty.  */
1679  if (EDGE_COUNT (block->succs) == 0)
1680    ;
1681  /* If we have one successor, we could have some phi nodes to
1682     translate through.  */
1683  else if (single_succ_p (block))
1684    {
1685      phi_translate_set (ANTIC_OUT, ANTIC_IN (single_succ (block)),
1686			 block, single_succ (block));
1687    }
1688  /* If we have multiple successors, we take the intersection of all of
1689     them.  */
1690  else
1691    {
1692      VEC(basic_block, heap) * worklist;
1693      edge e;
1694      size_t i;
1695      basic_block bprime, first;
1696      edge_iterator ei;
1697
1698      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
1699      FOR_EACH_EDGE (e, ei, block->succs)
1700	VEC_quick_push (basic_block, worklist, e->dest);
1701      first = VEC_index (basic_block, worklist, 0);
1702      set_copy (ANTIC_OUT, ANTIC_IN (first));
1703
1704      for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
1705	{
1706	  node = ANTIC_OUT->head;
1707	  while (node)
1708	    {
1709	      tree val;
1710	      value_set_node_t next = node->next;
1711
1712	      val = get_value_handle (node->expr);
1713	      if (!set_contains_value (ANTIC_IN (bprime), val))
1714		set_remove (ANTIC_OUT, node->expr);
1715	      node = next;
1716	    }
1717	}
1718      VEC_free (basic_block, heap, worklist);
1719    }
1720
1721  /* Generate ANTIC_OUT - TMP_GEN.  */
1722  S = bitmap_set_subtract_from_value_set (ANTIC_OUT, TMP_GEN (block), false);
1723
1724  /* Start ANTIC_IN with EXP_GEN - TMP_GEN */
1725  ANTIC_IN (block) = bitmap_set_subtract_from_value_set (EXP_GEN (block),
1726							 TMP_GEN (block),
1727							 true);
1728
1729  /* Then union in the ANTIC_OUT - TMP_GEN values,
1730     to get ANTIC_OUT U EXP_GEN - TMP_GEN */
1731  for (node = S->head; node; node = node->next)
1732    value_insert_into_set (ANTIC_IN (block), node->expr);
1733
1734  clean (ANTIC_IN (block), block);
1735  if (!set_equal (old, ANTIC_IN (block)))
1736    changed = true;
1737
1738 maybe_dump_sets:
1739  if (dump_file && (dump_flags & TDF_DETAILS))
1740    {
1741      if (ANTIC_OUT)
1742	print_value_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
1743
1744      if (ANTIC_SAFE_LOADS (block))
1745	print_value_set (dump_file, ANTIC_SAFE_LOADS (block),
1746			 "ANTIC_SAFE_LOADS", block->index);
1747      print_value_set (dump_file, ANTIC_IN (block), "ANTIC_IN", block->index);
1748
1749      if (S)
1750	print_value_set (dump_file, S, "S", block->index);
1751    }
1752
1753  for (son = first_dom_son (CDI_POST_DOMINATORS, block);
1754       son;
1755       son = next_dom_son (CDI_POST_DOMINATORS, son))
1756    {
1757      changed |= compute_antic_aux (son,
1758				    TEST_BIT (has_abnormal_preds, son->index));
1759    }
1760  return changed;
1761}
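
/* A worked instance of the equations above (a sketch, not taken from a
   real dump).  Assume a diamond BB1 -> {BB2, BB3} -> BB4 where BB4
   computes a + b and the operands have values VH.1 and VH.2:

     ANTIC_IN[BB4]  contains VH.1 + VH.2, coming from EXP_GEN[BB4].
     ANTIC_OUT[BB2] and ANTIC_OUT[BB3] are phi translations of
       ANTIC_IN[BB4], since BB4 is the single successor of each; with
       no relevant phis they also contain VH.1 + VH.2.
     ANTIC_OUT[BB1] is the intersection of ANTIC_IN[BB2] and
       ANTIC_IN[BB3], so a + b is anticipatable in BB1 as well,
       provided leaders for VH.1 and VH.2 survive clean ().  */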
1762
1763/* Compute ANTIC sets.  */
1764
1765static void
1766compute_antic (void)
1767{
1768  bool changed = true;
1769  int num_iterations = 0;
1770  basic_block block;
1771
1772  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1773     We pre-build the map of blocks with incoming abnormal edges here.  */
1774  has_abnormal_preds = sbitmap_alloc (last_basic_block);
1775  sbitmap_zero (has_abnormal_preds);
1776  FOR_EACH_BB (block)
1777    {
1778      edge_iterator ei;
1779      edge e;
1780
1781      FOR_EACH_EDGE (e, ei, block->preds)
1782	if (e->flags & EDGE_ABNORMAL)
1783	  {
1784	    SET_BIT (has_abnormal_preds, block->index);
1785	    break;
1786	  }
1787
1788      /* While we are here, give empty ANTIC_IN sets to each block.  */
1789      ANTIC_IN (block) = set_new (true);
1790    }
1791  /* At the exit block we anticipate nothing.  */
1792  ANTIC_IN (EXIT_BLOCK_PTR) = set_new (true);
1793
1794  while (changed)
1795    {
1796      num_iterations++;
1797      changed = false;
1798      changed = compute_antic_aux (EXIT_BLOCK_PTR, false);
1799    }
1800
1801  sbitmap_free (has_abnormal_preds);
1802
1803  if (dump_file && (dump_flags & TDF_STATS))
1804    fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
1805}
1806
1807/* Print the names represented by the bitmap NAMES, to the file OUT.  */
1808static void
1809dump_bitmap_of_names (FILE *out, bitmap names)
1810{
1811  bitmap_iterator bi;
1812  unsigned int i;
1813
1814  fprintf (out, " { ");
1815  EXECUTE_IF_SET_IN_BITMAP (names, 0, i, bi)
1816    {
1817      print_generic_expr (out, ssa_name (i), 0);
1818      fprintf (out, " ");
1819    }
1820  fprintf (out, "}\n");
1821}
1822
1823  /* Compute a set of representative vuse versions for each phi.  This
1824     is so we can compute conservative kill sets in terms of all vuses
1825     that are killed, instead of continually walking chains.
1826
1827     We also have to be able to kill all names associated with a phi when
1828     the phi dies in order to ensure we don't generate overlapping
1829     live ranges, which are not allowed in virtual SSA.  */
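/* As an illustrative sketch (the SSA version numbers are invented):
   for a virtual PHI such as

     VUSE_3 = PHI <VUSE_1, VUSE_2>

   the representative bitmap computed for version 3 would be roughly
   {1, 2, 3}, so a store that kills VUSE_3 is treated as killing
   versions 1 and 2 as well, without walking the virtual use/def
   chains again.  */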
1830
1831static bitmap *vuse_names;
1832static void
1833compute_vuse_representatives (void)
1834{
1835  tree phi;
1836  basic_block bb;
1837  VEC (tree, heap) *phis = NULL;
1838  bool changed = true;
1839  size_t i;
1840
1841  FOR_EACH_BB (bb)
1842    {
1843      for (phi = phi_nodes (bb);
1844	   phi;
1845	   phi = PHI_CHAIN (phi))
1846	if (!is_gimple_reg (PHI_RESULT (phi)))
1847	  VEC_safe_push (tree, heap, phis, phi);
1848    }
1849
1850  while (changed)
1851    {
1852      changed = false;
1853
1854      for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1855	{
1856	  size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
1857	  use_operand_p usep;
1858	  ssa_op_iter iter;
1859
1860	  if (vuse_names[ver] == NULL)
1861	    {
1862	      vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
1863	      bitmap_set_bit (vuse_names[ver], ver);
1864	    }
1865	  FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
1866	    {
1867	      tree use = USE_FROM_PTR (usep);
1868	      bitmap usebitmap = get_representative (vuse_names,
1869						     SSA_NAME_VERSION (use));
1870	      if (usebitmap != NULL)
1871		{
1872		  changed |= bitmap_ior_into (vuse_names[ver],
1873					      usebitmap);
1874		}
1875	      else
1876		{
1877		  changed |= !bitmap_bit_p (vuse_names[ver],
1878					    SSA_NAME_VERSION (use));
1879		  if (changed)
1880		    bitmap_set_bit (vuse_names[ver],
1881				    SSA_NAME_VERSION (use));
1882		}
1883	    }
1884	}
1885    }
1886
1887  if (dump_file && (dump_flags & TDF_DETAILS))
1888    for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
1889      {
1890	bitmap reps = get_representative (vuse_names,
1891					  SSA_NAME_VERSION (PHI_RESULT (phi)));
1892	if (reps)
1893	  {
1894	    print_generic_expr (dump_file, PHI_RESULT (phi), 0);
1895	    fprintf (dump_file, " represents ");
1896	    dump_bitmap_of_names (dump_file, reps);
1897	  }
1898      }
1899  VEC_free (tree, heap, phis);
1900}
1901
1902/* Compute reaching vuses and antic safe loads.  RVUSE computation is
1903   a small bit of iterative dataflow to determine what virtual uses
1904   reach what blocks.  Because we can't generate overlapping virtual
1905   uses, and virtual uses *do* actually die, this ends up being faster
1906   in most cases than continually walking the virtual use/def chains
1907   to determine whether we are inside a block where a given virtual is
1908   still available to be used.
1909
1910   ANTIC_SAFE_LOADS are those loads that actually occur before any kill to
1911   their vuses in the block, and thus are safe at the top of the
1912   block.
1913
1914   An example:
1915
1916   <block begin>
1917   b = *a
1918   *a = 9
1919   <block end>
1920
1921   b = *a is an antic safe load because it is still safe to consider it
1922   ANTIC at the top of the block.
1923
1924   We currently compute a conservative approximation to
1925   ANTIC_SAFE_LOADS.  We compute those loads that occur before *any*
1926   stores in the block.  This is not because it is difficult to
1927   compute the precise answer, but because it is expensive.  More
1928   testing is necessary to determine whether it is worth computing the
1929   precise answer.  */
1930
1931static void
1932compute_rvuse_and_antic_safe (void)
1933{
1934
1935  size_t i;
1936  tree phi;
1937  basic_block bb;
1938  int *postorder;
1939  bool changed = true;
1940  unsigned int *first_store_uid;
1941
1942  first_store_uid = xcalloc (n_basic_blocks, sizeof (unsigned int));
1943
1944  compute_vuse_representatives ();
1945
1946  FOR_ALL_BB (bb)
1947    {
1948      RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1949      RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1950      RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1951      RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1952      ANTIC_SAFE_LOADS (bb) = NULL;
1953    }
1954
1955  /* Mark virtual uses that are live on entry to the function.  */
1956  for (i = 0; i < num_ssa_names; i++)
1957    {
1958      tree name = ssa_name (i);
1959      if (name && !is_gimple_reg (name)
1960	  && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
1961	bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
1962			SSA_NAME_VERSION (name));
1963    }
1964
1965  /* Compute local sets for reaching vuses.
1966     GEN(block) = generated in block and not locally killed.
1967     KILL(block) = set of vuses killed in block.
1968  */
1969
1970  FOR_EACH_BB (bb)
1971    {
1972      block_stmt_iterator bsi;
1973      ssa_op_iter iter;
1974      def_operand_p defp;
1975      use_operand_p usep;
1976
1977      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1978	{
1979	  tree stmt = bsi_stmt (bsi);
1980
1981	  if (first_store_uid[bb->index] == 0
1982	      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VMAYDEF
1983				     | SSA_OP_VMUSTDEF | SSA_OP_VMUSTKILL))
1984	    {
1985	      first_store_uid[bb->index] = stmt_ann (stmt)->uid;
1986	    }
1987
1988
1989	  FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
1990				    | SSA_OP_VMAYUSE)
1991	    {
1992	      tree use = USE_FROM_PTR (usep);
1993	      bitmap repbit = get_representative (vuse_names,
1994						  SSA_NAME_VERSION (use));
1995	      if (repbit != NULL)
1996		{
1997		  bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
1998		  bitmap_ior_into (RVUSE_KILL (bb), repbit);
1999		}
2000	      else
2001		{
2002		  bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
2003		  bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
2004		}
2005	    }
2006	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
2007	    {
2008	      tree def = DEF_FROM_PTR (defp);
2009	      bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
2010	    }
2011	}
2012    }
2013
2014  FOR_EACH_BB (bb)
2015    {
2016      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2017	{
2018	  if (!is_gimple_reg (PHI_RESULT (phi)))
2019	    {
2020	      edge e;
2021	      edge_iterator ei;
2022
2023	      tree def = PHI_RESULT (phi);
2024	      /* In reality, the PHI result is generated at the end of
2025		 each predecessor block.  This will make the value
2026		 RVUSE_IN for the bb containing the PHI, which is
2027		 correct.  */
2028	      FOR_EACH_EDGE (e, ei, bb->preds)
2029		bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
2030	    }
2031	}
2032    }
2033
2034  /* Solve reaching vuses.
2035
2036     RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
2037     RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
2038  */
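  /* A rough worked instance of these equations (names and sets are
     illustrative only): if BB has predecessors P1 with RVUSE_OUT = {v1}
     and P2 with RVUSE_OUT = {v2}, and BB itself has RVUSE_GEN = {v3}
     and RVUSE_KILL = {v1}, then

       RVUSE_IN (BB)  = {v1} U {v2}              = {v1, v2}
       RVUSE_OUT (BB) = {v3} U ({v1, v2} - {v1}) = {v2, v3}

     so only v2 and v3 reach the end of BB.  */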
2039  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
2040  pre_and_rev_post_order_compute (NULL, postorder, false);
2041
2042  changed = true;
2043  while (changed)
2044    {
2045      int j;
2046      changed = false;
2047      for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2048	{
2049	  edge e;
2050	  edge_iterator ei;
2051	  bb = BASIC_BLOCK (postorder[j]);
2052
2053	  FOR_EACH_EDGE (e, ei, bb->preds)
2054	    bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
2055
2056	  changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
2057					   RVUSE_GEN (bb),
2058					   RVUSE_IN (bb),
2059					   RVUSE_KILL (bb));
2060	}
2061    }
2062  free (postorder);
2063
2064  if (dump_file && (dump_flags & TDF_DETAILS))
2065    {
2066      FOR_ALL_BB (bb)
2067	{
2068	  fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
2069	  dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
2070
2071	  fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
2072	  dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
2073
2074	  fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
2075	  dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
2076
2077	  fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
2078	  dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
2079	}
2080    }
2081
2082  FOR_EACH_BB (bb)
2083    {
2084      value_set_node_t node;
2085      if (bitmap_empty_p (RVUSE_KILL (bb)))
2086	continue;
2087
2088      for (node = EXP_GEN (bb)->head; node; node = node->next)
2089	{
2090	  if (REFERENCE_CLASS_P (node->expr))
2091	    {
2092	      tree vh = get_value_handle (node->expr);
2093	      tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
2094
2095	      if (maybe)
2096		{
2097		  tree def = SSA_NAME_DEF_STMT (maybe);
2098
2099		  if (bb_for_stmt (def) != bb)
2100		    continue;
2101
2102		  if (TREE_CODE (def) == PHI_NODE
2103		      || stmt_ann (def)->uid < first_store_uid[bb->index])
2104		    {
2105		      if (ANTIC_SAFE_LOADS (bb) == NULL)
2106			ANTIC_SAFE_LOADS (bb) = set_new (true);
2107		      value_insert_into_set (ANTIC_SAFE_LOADS (bb),
2108					     node->expr);
2109		    }
2110		}
2111	    }
2112	}
2113    }
2114  free (first_store_uid);
2115}
2116
2117/* Return true if we can value number the call in STMT.  This is true
2118   if we have a pure or constant call.  */
2119
2120static bool
2121can_value_number_call (tree stmt)
2122{
2123  tree call = get_call_expr_in (stmt);
2124
2125  if (call_expr_flags (call)  & (ECF_PURE | ECF_CONST))
2126    return true;
2127  return false;
2128}
2129
2130/* Return true if OP is a tree which we can perform value numbering
2131   on.  */
2132
2133static bool
2134can_value_number_operation (tree op)
2135{
2136  return UNARY_CLASS_P (op)
2137    || BINARY_CLASS_P (op)
2138    || COMPARISON_CLASS_P (op)
2139    || REFERENCE_CLASS_P (op)
2140    || (TREE_CODE (op) == CALL_EXPR
2141	&& can_value_number_call (op));
2142}
2143
2144
2145/* Return true if OP is a tree which we can perform PRE on.
2146   This may not match the operations we can value number, but in
2147   a perfect world would.  */
2148
2149static bool
2150can_PRE_operation (tree op)
2151{
2152  return UNARY_CLASS_P (op)
2153    || BINARY_CLASS_P (op)
2154    || COMPARISON_CLASS_P (op)
2155    || TREE_CODE (op) == INDIRECT_REF
2156    || TREE_CODE (op) == COMPONENT_REF
2157    || TREE_CODE (op) == CALL_EXPR
2158    || TREE_CODE (op) == ARRAY_REF;
2159}
2160
2161
2162/* Inserted expressions are placed onto this worklist, which is used
2163   for performing quick dead code elimination of insertions we made
2164   that didn't turn out to be necessary.   */
2165static VEC(tree,heap) *inserted_exprs;
2166
2167/* Pool allocated fake store expressions are placed onto this
2168   worklist, which, after performing dead code elimination, is walked
2169   to see which expressions need to be put into GC'able memory.  */
2170static VEC(tree, heap) *need_creation;
2171
2172/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2173   COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up
2174   trying to rename aggregates into ssa form directly, which is a
2175   no-no.
2176
2177   Thus, this routine doesn't create temporaries, it just builds a
2178   single access expression for the array, calling
2179   find_or_generate_expression to build the innermost pieces.
2180
2181   This function is a subroutine of create_expression_by_pieces, and
2182   should not be called on its own unless you really know what you
2183   are doing.
2184*/
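/* A hedged sketch of what this builds (SSA names are invented): for a
   value expression standing for something like a_1->f[i_2], the
   ARRAY_REF case recurses on its base, the COMPONENT_REF case recurses
   again, and the INDIRECT_REF case bottoms out at the pointer, so we
   hand back a single tree of the shape

     ARRAY_REF <COMPONENT_REF <INDIRECT_REF <a_1>, f>, i_2>

   where only the scalar pieces, such as the pointer a_1 and the index
   i_2, go through find_or_generate_expression (and thus may add
   statements to STMTS), while the reference tree itself is rebuilt
   without intermediate temporaries.  */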
2185static tree
2186create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
2187{
2188  tree genop = expr;
2189  tree folded;
2190
2191  if (TREE_CODE (genop) == VALUE_HANDLE)
2192    {
2193      tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
2194      if (found)
2195	return found;
2196    }
2197
2198  if (TREE_CODE (genop) == VALUE_HANDLE)
2199    genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2200
2201  switch (TREE_CODE (genop))
2202    {
2203    case ARRAY_REF:
2204      {
2205	tree op0;
2206	tree op1, op2, op3;
2207	op0 = create_component_ref_by_pieces (block,
2208					      TREE_OPERAND (genop, 0),
2209					      stmts);
2210	op1 = TREE_OPERAND (genop, 1);
2211	if (TREE_CODE (op1) == VALUE_HANDLE)
2212	  op1 = find_or_generate_expression (block, op1, stmts);
2213	op2 = TREE_OPERAND (genop, 2);
2214	if (op2 && TREE_CODE (op2) == VALUE_HANDLE)
2215	  op2 = find_or_generate_expression (block, op2, stmts);
2216	op3 = TREE_OPERAND (genop, 3);
2217	if (op3 && TREE_CODE (op3) == VALUE_HANDLE)
2218	  op3 = find_or_generate_expression (block, op3, stmts);
2219	folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1,
2220			      op2, op3);
2221	return folded;
2222      }
2223    case COMPONENT_REF:
2224      {
2225	tree op0;
2226	tree op1;
2227	op0 = create_component_ref_by_pieces (block,
2228					      TREE_OPERAND (genop, 0),
2229					      stmts);
2230	op1 = VALUE_HANDLE_EXPR_SET (TREE_OPERAND (genop, 1))->head->expr;
2231	folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
2232			      NULL_TREE);
2233	return folded;
2234      }
2235      break;
2236    case INDIRECT_REF:
2237      {
2238	tree op1 = TREE_OPERAND (genop, 0);
2239	tree genop1 = find_or_generate_expression (block, op1, stmts);
2240
2241	folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
2242			      genop1);
2243	return folded;
2244      }
2245      break;
2246    case VAR_DECL:
2247    case PARM_DECL:
2248    case RESULT_DECL:
2249    case SSA_NAME:
2250    case STRING_CST:
2251      return genop;
2252    default:
2253      gcc_unreachable ();
2254    }
2255
2256  return NULL_TREE;
2257}
2258
2259/* Find a leader for an expression, or generate one using
2260   create_expression_by_pieces if it's ANTIC but
2261   complex.
2262   BLOCK is the basic_block we are looking for leaders in.
2263   EXPR is the expression to find a leader or generate for.
2264   STMTS is the statement list to put the inserted expressions on.
2265   Returns the SSA_NAME of the LHS of the generated expression or the
2266   leader.  */
2267
2268static tree
2269find_or_generate_expression (basic_block block, tree expr, tree stmts)
2270{
2271  tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
2272
2273  /* If it's still NULL, it must be a complex expression, so generate
2274     it recursively.  */
2275  if (genop == NULL)
2276    {
2277      genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2278
2279      gcc_assert (can_PRE_operation (genop));
2280      genop = create_expression_by_pieces (block, genop, stmts);
2281    }
2282  return genop;
2283}
2284
2285#define NECESSARY(stmt)		stmt->common.asm_written_flag
2286/* Create an expression in pieces, so that we can handle very complex
2287   expressions that may be ANTIC, but not necessarily GIMPLE.
2288   BLOCK is the basic block the expression will be inserted into,
2289   EXPR is the expression to insert (in value form)
2290   STMTS is a statement list to append the necessary insertions into.
2291
2292   This function will die if we hit some value that shouldn't be
2293   ANTIC but is (i.e., there is no leader for it or its components).
2294   This function may also generate expressions that are themselves
2295   partially or fully redundant.  Those that are will be either made
2296   fully redundant during the next iteration of insert (for partially
2297   redundant ones), or eliminated by eliminate (for fully redundant
2298   ones).  */
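/* A rough example of the output (value handle and temporary names are
   invented): given the value expression VH.1 + VH.2, where VH.1 has
   leader a_1 in AVAIL_OUT but VH.2 has no leader and stands for
   b_2 * c_3, we would append something like

     pretmp.4_5 = b_2 * c_3;
     pretmp.6_7 = a_1 + pretmp.4_5;

   to STMTS, give both new names the corresponding value handles, and
   return pretmp.6_7.  */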
2299
2300static tree
2301create_expression_by_pieces (basic_block block, tree expr, tree stmts)
2302{
2303  tree temp, name;
2304  tree folded, forced_stmts, newexpr;
2305  tree v;
2306  tree_stmt_iterator tsi;
2307
2308  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2309    {
2310    case tcc_expression:
2311      {
2312	tree op0, op2;
2313	tree arglist;
2314	tree genop0, genop2;
2315	tree genarglist;
2316	tree walker, genwalker;
2317
2318	gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2319	genop2 = NULL;
2320
2321	op0 = TREE_OPERAND (expr, 0);
2322	arglist = TREE_OPERAND (expr, 1);
2323	op2 = TREE_OPERAND (expr, 2);
2324
2325	genop0 = find_or_generate_expression (block, op0, stmts);
2326	genarglist = copy_list (arglist);
2327	for (walker = arglist, genwalker = genarglist;
2328	     genwalker && walker;
2329	     genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
2330	  {
2331	    TREE_VALUE (genwalker)
2332	      = find_or_generate_expression (block, TREE_VALUE (walker),
2333					     stmts);
2334	  }
2335
2336	if (op2)
2337	  genop2 = find_or_generate_expression (block, op2, stmts);
2338	folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
2339			      genop0, genarglist, genop2);
2340	break;
2341
2342
2343      }
2344      break;
2345    case tcc_reference:
2346      {
2347	if (TREE_CODE (expr) == COMPONENT_REF
2348	    || TREE_CODE (expr) == ARRAY_REF)
2349	  {
2350	    folded = create_component_ref_by_pieces (block, expr, stmts);
2351	  }
2352	else
2353	  {
2354	    tree op1 = TREE_OPERAND (expr, 0);
2355	    tree genop1 = find_or_generate_expression (block, op1, stmts);
2356
2357	    folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2358				  genop1);
2359	  }
2360	break;
2361      }
2362
2363    case tcc_binary:
2364    case tcc_comparison:
2365      {
2366	tree op1 = TREE_OPERAND (expr, 0);
2367	tree op2 = TREE_OPERAND (expr, 1);
2368	tree genop1 = find_or_generate_expression (block, op1, stmts);
2369	tree genop2 = find_or_generate_expression (block, op2, stmts);
2370	folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
2371			      genop1, genop2);
2372	break;
2373      }
2374
2375    case tcc_unary:
2376      {
2377	tree op1 = TREE_OPERAND (expr, 0);
2378	tree genop1 = find_or_generate_expression (block, op1, stmts);
2379	folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
2380			      genop1);
2381	break;
2382      }
2383
2384    default:
2385      gcc_unreachable ();
2386    }
2387
2388  /* Force the generated expression to be a sequence of GIMPLE
2389     statements.
2390     We have to call unshare_expr because force_gimple_operand may
2391     modify the tree we pass to it.  */
2392  newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
2393                                  false, NULL);
2394
2395  /* If forcing to GIMPLE produced any intermediate statements, add their
2396     names to the value sets and chain them on in the instruction stream.  */
2397  if (forced_stmts)
2398    {
2399      tsi = tsi_start (forced_stmts);
2400      for (; !tsi_end_p (tsi); tsi_next (&tsi))
2401	{
2402	  tree stmt = tsi_stmt (tsi);
2403	  tree forcedname = TREE_OPERAND (stmt, 0);
2404	  tree forcedexpr = TREE_OPERAND (stmt, 1);
2405	  tree val = vn_lookup_or_add (forcedexpr, NULL);
2406
2407	  VEC_safe_push (tree, heap, inserted_exprs, stmt);
2408	  vn_add (forcedname, val);
2409	  bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
2410	  bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
2411	  mark_new_vars_to_rename (stmt);
2412	}
2413      tsi = tsi_last (stmts);
2414      tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
2415    }
2416
2417  /* Build and insert the assignment of the end result to the temporary
2418     that we will return.  */
2419  if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
2420    {
2421      pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
2422      get_var_ann (pretemp);
2423    }
2424
2425  temp = pretemp;
2426  add_referenced_var (temp);
2427
2428  if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
2429    DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
2430
2431  newexpr = build2 (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
2432  name = make_ssa_name (temp, newexpr);
2433  TREE_OPERAND (newexpr, 0) = name;
2434  NECESSARY (newexpr) = 0;
2435
2436  tsi = tsi_last (stmts);
2437  tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
2438  VEC_safe_push (tree, heap, inserted_exprs, newexpr);
2439  mark_new_vars_to_rename (newexpr);
2440
2441  /* Add a value handle to the temporary.
2442     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2443     we are creating the expression by pieces, and this particular piece of
2444     the expression may have been represented.  There is no harm in replacing
2445     here.  */
2446  v = get_value_handle (expr);
2447  vn_add (name, v);
2448  bitmap_value_replace_in_set (NEW_SETS (block), name);
2449  bitmap_value_replace_in_set (AVAIL_OUT (block), name);
2450
2451  pre_stats.insertions++;
2452  if (dump_file && (dump_flags & TDF_DETAILS))
2453    {
2454      fprintf (dump_file, "Inserted ");
2455      print_generic_expr (dump_file, newexpr, 0);
2456      fprintf (dump_file, " in predecessor %d\n", block->index);
2457    }
2458
2459  return name;
2460}
2461
2462/* Insert the to-be-made-available values of NODE for each
2463   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2464   merge the result with a phi node, given the same value handle as
2465   NODE.  Return true if we have inserted new stuff.  */
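/* A sketch of the transformation (block numbers and SSA names are
   invented): if NODE's value is available out of predecessor 3 as t_4
   but not out of predecessor 4, then AVAIL holds t_4 for the first
   edge and a value expression for the second, and we end up with
   roughly

     <in predecessor 4>        pretmp_5 = a_1 + b_2;
     <at the start of BLOCK>   prephitmp_6 = PHI <t_4(3), pretmp_5(4)>

   where prephitmp_6 is given the same value handle as NODE, making the
   expression fully redundant from BLOCK onwards.  */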
2466
2467static bool
2468insert_into_preds_of_block (basic_block block, value_set_node_t node,
2469			    tree *avail)
2470{
2471  tree val = get_value_handle (node->expr);
2472  edge pred;
2473  bool insertions = false;
2474  bool nophi = false;
2475  basic_block bprime;
2476  tree eprime;
2477  edge_iterator ei;
2478  tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
2479  tree temp;
2480
2481  if (dump_file && (dump_flags & TDF_DETAILS))
2482    {
2483      fprintf (dump_file, "Found partial redundancy for expression ");
2484      print_generic_expr (dump_file, node->expr, 0);
2485      fprintf (dump_file, " (");
2486      print_generic_expr (dump_file, val, 0);
2487      fprintf (dump_file, ")");
2488      fprintf (dump_file, "\n");
2489    }
2490
2491  /* Make sure we aren't creating an induction variable.  */
2492  if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
2493      && TREE_CODE_CLASS (TREE_CODE (node->expr)) != tcc_reference )
2494    {
2495      bool firstinsideloop = false;
2496      bool secondinsideloop = false;
2497      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
2498					       EDGE_PRED (block, 0)->src);
2499      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
2500						EDGE_PRED (block, 1)->src);
2501      /* Induction variables only have one edge inside the loop.  */
2502      if (firstinsideloop ^ secondinsideloop)
2503	{
2504	  if (dump_file && (dump_flags & TDF_DETAILS))
2505	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
2506	  nophi = true;
2507	}
2508    }
2509
2510
2511  /* Make the necessary insertions.  */
2512  FOR_EACH_EDGE (pred, ei, block->preds)
2513    {
2514      tree stmts = alloc_stmt_list ();
2515      tree builtexpr;
2516      bprime = pred->src;
2517      eprime = avail[bprime->index];
2518
2519      if (can_PRE_operation (eprime))
2520	{
2521#ifdef ENABLE_CHECKING
2522	  tree vh;
2523
2524	  /* eprime may be an invariant.  */
2525	  vh = TREE_CODE (eprime) == VALUE_HANDLE
2526	    ? eprime
2527	    : get_value_handle (eprime);
2528
2529	  /* ensure that the virtual uses we need reach our block.  */
2530	  if (TREE_CODE (vh) == VALUE_HANDLE)
2531	    {
2532	      int i;
2533	      tree vuse;
2534	      for (i = 0;
2535		   VEC_iterate (tree, VALUE_HANDLE_VUSES (vh), i, vuse);
2536		   i++)
2537		{
2538		  size_t id = SSA_NAME_VERSION (vuse);
2539		  gcc_assert (bitmap_bit_p (RVUSE_OUT (bprime), id)
2540			      || IS_EMPTY_STMT (SSA_NAME_DEF_STMT (vuse)));
2541		}
2542	    }
2543#endif
2544	  builtexpr = create_expression_by_pieces (bprime,
2545						   eprime,
2546						   stmts);
2547	  bsi_insert_on_edge (pred, stmts);
2548	  avail[bprime->index] = builtexpr;
2549	  insertions = true;
2550	}
2551    }
2552  /* If we didn't want a phi node, and we made insertions, we still have
2553     inserted new stuff, and thus return true.  If we didn't want a phi node,
2554     and didn't make insertions, we haven't added anything new, so return
2555     false.  */
2556  if (nophi && insertions)
2557    return true;
2558  else if (nophi && !insertions)
2559    return false;
2560
2561  /* Now build a phi for the new variable.  */
2562  if (!prephitemp || TREE_TYPE (prephitemp) != type)
2563    {
2564      prephitemp = create_tmp_var (type, "prephitmp");
2565      get_var_ann (prephitemp);
2566    }
2567
2568  temp = prephitemp;
2569  add_referenced_var (temp);
2570
2571  if (TREE_CODE (type) == COMPLEX_TYPE)
2572    DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
2573  temp = create_phi_node (temp, block);
2574
2575  NECESSARY (temp) = 0;
2576  VEC_safe_push (tree, heap, inserted_exprs, temp);
2577  FOR_EACH_EDGE (pred, ei, block->preds)
2578    add_phi_arg (temp, avail[pred->src->index], pred);
2579
2580  vn_add (PHI_RESULT (temp), val);
2581
2582  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
2583     this insertion, since we test for the existence of this value in PHI_GEN
2584     before proceeding with the partial redundancy checks in insert_aux.
2585
2586     The value may exist in AVAIL_OUT, in particular, it could be represented
2587     by the expression we are trying to eliminate, in which case we want the
2588     replacement to occur.  If it does not exist in AVAIL_OUT, we want it
2589     inserted there.
2590
2591     Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
2592     this block, because if it did, it would have existed in our dominator's
2593     AVAIL_OUT, and would have been skipped due to the full redundancy check.
2594  */
2595
2596  bitmap_insert_into_set (PHI_GEN (block),
2597			  PHI_RESULT (temp));
2598  bitmap_value_replace_in_set (AVAIL_OUT (block),
2599			       PHI_RESULT (temp));
2600  bitmap_insert_into_set (NEW_SETS (block),
2601			  PHI_RESULT (temp));
2602
2603  if (dump_file && (dump_flags & TDF_DETAILS))
2604    {
2605      fprintf (dump_file, "Created phi ");
2606      print_generic_expr (dump_file, temp, 0);
2607      fprintf (dump_file, " in block %d\n", block->index);
2608    }
2609  pre_stats.phis++;
2610  return true;
2611}
2612
2613
2614
2615/* Perform insertion of partially redundant values.
2616   For BLOCK, do the following:
2617   1.  Propagate the NEW_SETS of the dominator into the current block.
2618   If the block has multiple predecessors,
2619       2a. Iterate over the ANTIC expressions for the block to see if
2620           any of them are partially redundant.
2621       2b. If so, insert them into the necessary predecessors to make
2622           the expression fully redundant.
2623       2c. Insert a new PHI merging the values of the predecessors.
2624       2d. Insert the new PHI, and the new expressions, into the
2625           NEW_SETS set.
2626   3. Recursively call ourselves on the dominator children of BLOCK.
2627
2628*/
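/* For instance (names invented), if a previous call inserted
   pretmp_1 = a_2 + b_3 in our dominator, step 1 propagates pretmp_1
   into this block's NEW_SETS and AVAIL_OUT, and step 2a then finds the
   value fully redundant (it is already in the dominator's AVAIL_OUT)
   instead of inserting it again.  */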
2629
2630static bool
2631insert_aux (basic_block block)
2632{
2633  basic_block son;
2634  bool new_stuff = false;
2635
2636  if (block)
2637    {
2638      basic_block dom;
2639      dom = get_immediate_dominator (CDI_DOMINATORS, block);
2640      if (dom)
2641	{
2642	  unsigned i;
2643	  bitmap_iterator bi;
2644	  bitmap_set_t newset = NEW_SETS (dom);
2645	  if (newset)
2646	    {
2647	      /* Note that we need to value_replace in both NEW_SETS and
2648		 AVAIL_OUT.  In either set the value may currently be
2649		 represented by some non-simple expression that we want to
2650		 replace with the new name.  */
2651	      EXECUTE_IF_SET_IN_BITMAP (newset->expressions, 0, i, bi)
2652		{
2653		  bitmap_value_replace_in_set (NEW_SETS (block), ssa_name (i));
2654		  bitmap_value_replace_in_set (AVAIL_OUT (block), ssa_name (i));
2655		}
2656	    }
2657	  if (!single_pred_p (block))
2658	    {
2659	      value_set_node_t node;
2660	      for (node = ANTIC_IN (block)->head;
2661		   node;
2662		   node = node->next)
2663		{
2664		  if (can_PRE_operation (node->expr)
2665		      && !AGGREGATE_TYPE_P (TREE_TYPE (node->expr)))
2666		    {
2667		      tree *avail;
2668		      tree val;
2669		      bool by_some = false;
2670		      bool cant_insert = false;
2671		      bool all_same = true;
2672		      tree first_s = NULL;
2673		      edge pred;
2674		      basic_block bprime;
2675		      tree eprime = NULL_TREE;
2676		      edge_iterator ei;
2677
2678		      val = get_value_handle (node->expr);
2679		      if (bitmap_set_contains_value (PHI_GEN (block), val))
2680			continue;
2681		      if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
2682			{
2683			  if (dump_file && (dump_flags & TDF_DETAILS))
2684			    fprintf (dump_file, "Found fully redundant value\n");
2685			  continue;
2686			}
2687
2688		      avail = XCNEWVEC (tree, last_basic_block);
2689		      FOR_EACH_EDGE (pred, ei, block->preds)
2690			{
2691			  tree vprime;
2692			  tree edoubleprime;
2693
2694			  /* This can happen in the very weird case
2695			     that our fake infinite loop edges have caused a
2696			     critical edge to appear.  */
2697			  if (EDGE_CRITICAL_P (pred))
2698			    {
2699			      cant_insert = true;
2700			      break;
2701			    }
2702			  bprime = pred->src;
2703			  eprime = phi_translate (node->expr,
2704						  ANTIC_IN (block),
2705						  bprime, block);
2706
2707			  /* eprime will generally only be NULL if the
2708			     value of the expression, translated
2709			     through the PHI for this predecessor, is
2710			     undefined.  If that is the case, we can't
2711			     make the expression fully redundant,
2712			     because its value is undefined along a
2713			     predecessor path.  We can thus break out
2714			     early because it doesn't matter what the
2715			     rest of the results are.  */
2716			  if (eprime == NULL)
2717			    {
2718			      cant_insert = true;
2719			      break;
2720			    }
2721
2722			  eprime = fully_constant_expression (eprime);
2723			  vprime = get_value_handle (eprime);
2724			  gcc_assert (vprime);
2725			  edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
2726							     vprime);
2727			  if (edoubleprime == NULL)
2728			    {
2729			      avail[bprime->index] = eprime;
2730			      all_same = false;
2731			    }
2732			  else
2733			    {
2734			      avail[bprime->index] = edoubleprime;
2735			      by_some = true;
2736			      if (first_s == NULL)
2737				first_s = edoubleprime;
2738			      else if (!operand_equal_p (first_s, edoubleprime,
2739							 0))
2740				all_same = false;
2741			    }
2742			}
2743		      /* If we can insert it, it's not the same value
2744			 already existing along every predecessor, and
2745			 it's defined by some predecessor, then it is
2746			 partially redundant.  */
2747		      if (!cant_insert && !all_same && by_some)
2748			{
2749			  if (insert_into_preds_of_block (block, node, avail))
2750 			    new_stuff = true;
2751			}
2752		      /* If all edges produce the same value and that value is
2753			 an invariant, then the PHI has the same value on all
2754			 edges.  Note this.  */
2755		      else if (!cant_insert && all_same && eprime
2756			       && is_gimple_min_invariant (eprime)
2757			       && !is_gimple_min_invariant (val))
2758			{
2759			  value_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
2760			  value_set_node_t node;
2761
2762			  for (node = exprset->head; node; node = node->next)
2763 			    {
2764			      if (TREE_CODE (node->expr) == SSA_NAME)
2765				{
2766				  vn_add (node->expr, eprime);
2767				  pre_stats.constified++;
2768				}
2769 			    }
2770			}
2771		      free (avail);
2772		    }
2773		}
2774	    }
2775	}
2776    }
2777  for (son = first_dom_son (CDI_DOMINATORS, block);
2778       son;
2779       son = next_dom_son (CDI_DOMINATORS, son))
2780    {
2781      new_stuff |= insert_aux (son);
2782    }
2783
2784  return new_stuff;
2785}
2786
2787/* Perform insertion of partially redundant values.  */
2788
2789static void
2790insert (void)
2791{
2792  bool new_stuff = true;
2793  basic_block bb;
2794  int num_iterations = 0;
2795
2796  FOR_ALL_BB (bb)
2797    NEW_SETS (bb) = bitmap_set_new ();
2798
2799  while (new_stuff)
2800    {
2801      num_iterations++;
2802      new_stuff = false;
2803      new_stuff = insert_aux (ENTRY_BLOCK_PTR);
2804    }
2805  if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
2806    fprintf (dump_file, "insert required %d iterations\n", num_iterations);
2807}
2808
2809
2810/* Return true if EXPR is an SSA variable with no defining statement in
2811   this procedure, *AND* isn't a live-on-entry parameter.  */
2812
2813static bool
2814is_undefined_value (tree expr)
2815{
2816  return (TREE_CODE (expr) == SSA_NAME
2817          && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr))
2818	  /* PARM_DECLs and hard registers are always defined.  */
2819	  && TREE_CODE (SSA_NAME_VAR (expr)) != PARM_DECL);
2820}
2821
2822
2823/* Given an SSA variable VAR and an expression EXPR, compute the value
2824   number for EXPR and create a value handle (VAL) for it.  If VAR and
2825   EXPR are not the same, associate VAL with VAR.  Finally, add VAR to
2826   S1 and its value handle to S2.
2827
2828   STMT is the statement EXPR comes from; its virtual use operands (if
2829   any) are taken into account when value numbering EXPR.  */
2830
2831static inline void
2832add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
2833	     bitmap_set_t s2)
2834{
2835  tree val = vn_lookup_or_add (expr, stmt);
2836
2837  /* VAR and EXPR may be the same when processing statements for which
2838     we are not computing value numbers (e.g., non-assignments, or
2839     statements that make aliased stores).  In those cases, we are
2840     only interested in making VAR available as its own value.  */
2841  if (var != expr)
2842    vn_add (var, val);
2843
2844  if (s1)
2845    bitmap_insert_into_set (s1, var);
2846  bitmap_value_insert_into_set (s2, var);
2847}
2848
2849
2850/* Given a unary or binary expression EXPR, create and return a new
2851   expression with the same structure as EXPR but with its operands
2852   replaced with the value handles of each of the operands of EXPR.
2853
2854   STMT is the statement EXPR comes from, used for its virtual use
2855   operands (if any).  Insert EXPR's operands into the EXP_GEN set for BLOCK.  */
2856
2857static inline tree
2858create_value_expr_from (tree expr, basic_block block, tree stmt)
2859{
2860  int i;
2861  enum tree_code code = TREE_CODE (expr);
2862  tree vexpr;
2863  alloc_pool pool;
2864
2865  gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
2866	      || TREE_CODE_CLASS (code) == tcc_binary
2867	      || TREE_CODE_CLASS (code) == tcc_comparison
2868	      || TREE_CODE_CLASS (code) == tcc_reference
2869	      || TREE_CODE_CLASS (code) == tcc_expression
2870	      || TREE_CODE_CLASS (code) == tcc_exceptional
2871	      || TREE_CODE_CLASS (code) == tcc_declaration);
2872
2873  if (TREE_CODE_CLASS (code) == tcc_unary)
2874    pool = unary_node_pool;
2875  else if (TREE_CODE_CLASS (code) == tcc_reference)
2876    pool = reference_node_pool;
2877  else if (TREE_CODE_CLASS (code) == tcc_binary)
2878    pool = binary_node_pool;
2879  else if (TREE_CODE_CLASS (code) == tcc_comparison)
2880    pool = comparison_node_pool;
2881  else if (TREE_CODE_CLASS (code) == tcc_exceptional)
2882    {
2883      gcc_assert (code == TREE_LIST);
2884      pool = list_node_pool;
2885    }
2886  else
2887    {
2888      gcc_assert (code == CALL_EXPR);
2889      pool = expression_node_pool;
2890    }
2891
2892  vexpr = (tree) pool_alloc (pool);
2893  memcpy (vexpr, expr, tree_size (expr));
2894
2895  /* This case is only for TREE_LIST's that appear as part of
2896     CALL_EXPR's.  Anything else is a bug, but we can't easily verify
2897     this, hence this comment.  TREE_LIST is not handled by the
2898     general case below because it doesn't have a fixed length or
2899     operands, so you can't access purpose/value/chain through the
2900     TREE_OPERAND macros.  */
2901
2902  if (code == TREE_LIST)
2903    {
2904      tree op = NULL_TREE;
2905      tree temp = NULL_TREE;
2906      if (TREE_CHAIN (vexpr))
2907	temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
2908      TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);
2909
2910
2911      /* Recursively value-numberize reference ops.  */
2912      if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
2913	{
2914	  tree tempop;
2915	  op = TREE_VALUE (vexpr);
2916	  tempop = create_value_expr_from (op, block, stmt);
2917	  op = tempop ? tempop : op;
2918
2919	  TREE_VALUE (vexpr)  = vn_lookup_or_add (op, stmt);
2920	}
2921      else
2922	{
2923	  op = TREE_VALUE (vexpr);
2924	  TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
2925	}
2926      /* This is the equivalent of inserting op into EXP_GEN like we
2927	 do below.  */
2928      if (!is_undefined_value (op))
2929	value_insert_into_set (EXP_GEN (block), op);
2930
2931      return vexpr;
2932    }
2933
2934  for (i = 0; i < TREE_CODE_LENGTH (code); i++)
2935    {
2936      tree val, op;
2937
2938      op = TREE_OPERAND (expr, i);
2939      if (op == NULL_TREE)
2940	continue;
2941
2942      /* Recursively value-numberize reference ops and tree lists.  */
2943      if (REFERENCE_CLASS_P (op))
2944	{
2945	  tree tempop = create_value_expr_from (op, block, stmt);
2946	  op = tempop ? tempop : op;
2947	  val = vn_lookup_or_add (op, stmt);
2948	}
2949      else if (TREE_CODE (op) == TREE_LIST)
2950	{
2951	  tree tempop;
2952
2953	  gcc_assert (TREE_CODE (expr) == CALL_EXPR);
2954	  tempop = create_value_expr_from (op, block, stmt);
2955
2956	  op = tempop ? tempop : op;
2957	  vn_lookup_or_add (op, NULL);
2958	  /* Unlike everywhere else, we do *not* want to replace the
2959	     TREE_LIST itself with a value number, because support
2960	     functions we call will blow up.  */
2961	  val = op;
2962	}
2963      else
2964	/* Create a value handle for OP and add it to VEXPR.  */
2965	val = vn_lookup_or_add (op, NULL);
2966
2967      if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
2968	value_insert_into_set (EXP_GEN (block), op);
2969
2970      if (TREE_CODE (val) == VALUE_HANDLE)
2971	TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));
2972
2973      TREE_OPERAND (vexpr, i) = val;
2974    }
2975
2976  return vexpr;
2977}
2978
2979
2980
2981/* Insert extra phis to merge values that are fully available from
2982   preds of BLOCK, but have no dominating representative coming from
2983   block DOM.   */
2984
2985static void
2986insert_extra_phis (basic_block block, basic_block dom)
2987{
2988
2989  if (!single_pred_p (block))
2990    {
2991      edge e;
2992      edge_iterator ei;
2993      bool first = true;
2994      bitmap_set_t tempset = bitmap_set_new ();
2995
2996      FOR_EACH_EDGE (e, ei, block->preds)
2997	{
2998	  /* We cannot handle abnormal incoming edges correctly.  */
2999	  if (e->flags & EDGE_ABNORMAL)
3000	    return;
3001
3002	  if (first)
3003	    {
3004	      bitmap_set_copy (tempset, AVAIL_OUT (e->src));
3005	      first = false;
3006	    }
3007	  else
3008	    bitmap_set_and (tempset, AVAIL_OUT (e->src));
3009	}
3010
3011      if (dom)
3012	bitmap_set_and_compl (tempset, AVAIL_OUT (dom));
3013
3014      if (!bitmap_set_empty_p (tempset))
3015	{
3016	  unsigned int i;
3017	  bitmap_iterator bi;
3018
3019	  EXECUTE_IF_SET_IN_BITMAP (tempset->expressions, 0, i, bi)
3020	    {
3021	      tree name = ssa_name (i);
3022	      tree val = get_value_handle (name);
3023	      tree temp;
3024
3025	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
3026		continue;
3027
3028	      if (!mergephitemp
3029		  || TREE_TYPE (name) != TREE_TYPE (mergephitemp))
3030		{
3031		  mergephitemp = create_tmp_var (TREE_TYPE (name),
3032						 "mergephitmp");
3033		  get_var_ann (mergephitemp);
3034		}
3035	      temp = mergephitemp;
3036
3037	      if (dump_file && (dump_flags & TDF_DETAILS))
3038		{
3039		  fprintf (dump_file, "Creating phi ");
3040		  print_generic_expr (dump_file, temp, 0);
3041		  fprintf (dump_file, " to merge available but not dominating values ");
3042		}
3043
3044	      add_referenced_var (temp);
3045	      temp = create_phi_node (temp, block);
3046	      NECESSARY (temp) = 0;
3047	      VEC_safe_push (tree, heap, inserted_exprs, temp);
3048
3049	      FOR_EACH_EDGE (e, ei, block->preds)
3050		{
3051		  tree leader = bitmap_find_leader (AVAIL_OUT (e->src), val);
3052
3053		  gcc_assert (leader);
3054		  add_phi_arg (temp, leader, e);
3055
3056		  if (dump_file && (dump_flags & TDF_DETAILS))
3057		    {
3058		      print_generic_expr (dump_file, leader, 0);
3059		      fprintf (dump_file, " in block %d,", e->src->index);
3060		    }
3061		}
3062
3063	      vn_add (PHI_RESULT (temp), val);
3064
3065	      if (dump_file && (dump_flags & TDF_DETAILS))
3066		fprintf (dump_file, "\n");
3067	    }
3068	}
3069    }
3070}
3071
3072/* Given a statement STMT whose right hand side MEM_REF is a load, try
3073   to find the expression stored in the location the load reads from, and
3074   return true if a useful equivalence was recorded for LHS.  */
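/* For example (in rough dump form, with invented names), given

     # a_5 = V_MAY_DEF <a_2>
     *p_1 = b_3;
     ...
     # VUSE <a_5>
     x_4 = *p_1;

   where every VUSE of the load is defined by the store, we can record
   that x_4 has the value of b_3 and make that value available in the
   block.  */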
3075
3076static bool
3077try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
3078{
3079  tree store_stmt = NULL;
3080  tree rhs;
3081  ssa_op_iter i;
3082  tree vuse;
3083
3084  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
3085    {
3086      tree def_stmt;
3087
3088      gcc_assert (TREE_CODE (vuse) == SSA_NAME);
3089      def_stmt = SSA_NAME_DEF_STMT (vuse);
3090
3091      /* If there is no useful statement for this VUSE, we'll not find a
3092	 useful expression to return either.  Likewise, if there is a
3093	 statement but it is not a simple assignment or it has virtual
3094	 uses, we can stop right here.  Note that this means we do
3095	 not look through PHI nodes, which is intentional.  */
3096      if (!def_stmt
3097	  || TREE_CODE (def_stmt) != MODIFY_EXPR
3098	  || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
3099	return false;
3100
3101      /* If this is not the same statement as one we have looked at for
3102	 another VUSE of STMT already, we have two statements producing
3103	 something that reaches our STMT.  */
3104      if (store_stmt && store_stmt != def_stmt)
3105	return false;
3106      else
3107	{
3108	  /* Is this a store to the exact same location as the one we are
3109	     loading from in STMT?  */
3110	  if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
3111	    return false;
3112
3113	  /* Otherwise remember this statement and see if all other VUSEs
3114	     come from the same statement.  */
3115	  store_stmt = def_stmt;
3116	}
3117    }
3118
3119  /* Alright then, we have visited all VUSEs of STMT and we've determined
3120     that all of them come from the same statement STORE_STMT.  See if there
3121     is a useful expression we can deduce from STORE_STMT.  */
3122  rhs = TREE_OPERAND (store_stmt, 1);
3123  if ((TREE_CODE (rhs) == SSA_NAME
3124       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3125      || is_gimple_min_invariant (rhs)
3126      || TREE_CODE (rhs) == ADDR_EXPR
3127      || TREE_INVARIANT (rhs))
3128    {
3129
3130      /* Yay!  Compute a value number for the RHS of the statement and
3131 	 add its value to the AVAIL_OUT set for the block.  Add the LHS
3132	 to TMP_GEN.  */
3133      add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
3134      if (TREE_CODE (rhs) == SSA_NAME
3135	  && !is_undefined_value (rhs))
3136	value_insert_into_set (EXP_GEN (block), rhs);
3137      return true;
3138    }
3139
3140  return false;
3141}
3142
3143/* Return a copy of NODE that is stored in the temporary alloc_pools.
3144   This is done recursively, so that the operands are stored in the
3145   pools as well.  */
3146
3147static tree
3148poolify_tree (tree node)
3149{
3150  switch  (TREE_CODE (node))
3151    {
3152    case INDIRECT_REF:
3153      {
3154	tree temp = pool_alloc (reference_node_pool);
3155	memcpy (temp, node, tree_size (node));
3156	TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3157	return temp;
3158      }
3159      break;
3160    case MODIFY_EXPR:
3161      {
3162	tree temp = pool_alloc (modify_expr_node_pool);
3163	memcpy (temp, node, tree_size (node));
3164	TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3165	TREE_OPERAND (temp, 1) = poolify_tree (TREE_OPERAND (temp, 1));
3166	return temp;
3167      }
3168      break;
3169    case SSA_NAME:
3170    case INTEGER_CST:
3171    case STRING_CST:
3172    case REAL_CST:
3173    case PARM_DECL:
3174    case VAR_DECL:
3175    case RESULT_DECL:
3176      return node;
3177    default:
3178      gcc_unreachable ();
3179    }
3180}
3181
3182static tree modify_expr_template;
3183
3184/* Allocate a MODIFY_EXPR with TYPE, and operands OP1, OP2 in the
3185   alloc pools and return it.  */
3186static tree
3187poolify_modify_expr (tree type, tree op1, tree op2)
3188{
3189  if (modify_expr_template == NULL)
3190    modify_expr_template = build2 (MODIFY_EXPR, type, op1, op2);
3191
3192  TREE_OPERAND (modify_expr_template, 0) = op1;
3193  TREE_OPERAND (modify_expr_template, 1) = op2;
3194  TREE_TYPE (modify_expr_template) = type;
3195
3196  return poolify_tree (modify_expr_template);
3197}
3198
3199
3200/* For each real store operation of the form
3201   *a = <value> that we see, create a corresponding fake store of the
3202   form storetmp_<version> = *a.
3203
3204   This enables AVAIL computation to mark the results of stores as
3205   available.  Without this, you'd need to do some computation to
3206   mark the result of stores as ANTIC and AVAIL at all the right
3207   points.
3208   To save memory, we keep the store statements pool allocated until
3209   we decide whether they are necessary or not.  */
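/* For example (in rough dump form, with invented names), for

     # a_3 = V_MAY_DEF <a_2>
     *p_1 = x_4;

   we insert, right after it, a pool allocated fake load

     # VUSE <a_3>
     storetmp_5 = *p_1;

   so that the value of *p_1 shows up in the AVAIL sets like any other
   expression; the fake statement is later either turned into a real
   statement or released, depending on whether PRE used it.  */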
3211
3212static void
3213insert_fake_stores (void)
3214{
3215  basic_block block;
3216
3217  FOR_ALL_BB (block)
3218    {
3219      block_stmt_iterator bsi;
3220      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3221	{
3222	  tree stmt = bsi_stmt (bsi);
3223
3224	  /* We can't generate SSA names for stores that are complex
3225	     or aggregate.  We also want to ignore things whose
3226	     virtual uses occur in abnormal phis.  */
3227
3228	  if (TREE_CODE (stmt) == MODIFY_EXPR
3229	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == INDIRECT_REF
3230	      && !AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0)))
3231	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (stmt, 0))) != COMPLEX_TYPE)
3232	    {
3233	      ssa_op_iter iter;
3234	      def_operand_p defp;
3235	      tree lhs = TREE_OPERAND (stmt, 0);
3236	      tree rhs = TREE_OPERAND (stmt, 1);
3237	      tree new;
3238	      bool notokay = false;
3239
3240	      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
3241		{
3242		  tree defvar = DEF_FROM_PTR (defp);
3243		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
3244		    {
3245		      notokay = true;
3246		      break;
3247		    }
3248		}
3249
3250	      if (notokay)
3251		continue;
3252
3253	      if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
3254		{
3255		  storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
3256		  get_var_ann (storetemp);
3257		}
3258
3259	      new = poolify_modify_expr (TREE_TYPE (stmt), storetemp, lhs);
3260
3261	      lhs = make_ssa_name (storetemp, new);
3262	      TREE_OPERAND (new, 0) = lhs;
3263	      create_ssa_artficial_load_stmt (new, stmt);
3264
3265	      NECESSARY (new) = 0;
3266	      VEC_safe_push (tree, heap, inserted_exprs, new);
3267	      VEC_safe_push (tree, heap, need_creation, new);
3268	      bsi_insert_after (&bsi, new, BSI_NEW_STMT);
3269	    }
3270	}
3271    }
3272}
3273
3274/* Turn the pool allocated fake stores that we created back into real
3275   GC allocated ones if they turned out to be necessary to PRE some
3276   expressions.  */
3277
3278static void
3279realify_fake_stores (void)
3280{
3281  unsigned int i;
3282  tree stmt;
3283
3284  for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
3285    {
3286      if (NECESSARY (stmt))
3287	{
3288	  block_stmt_iterator bsi;
3289	  tree newstmt;
3290
3291	  /* Mark the temp variable as referenced */
3292	  add_referenced_var (SSA_NAME_VAR (TREE_OPERAND (stmt, 0)));
3293
3294	  /* Put the new statement in GC memory, fix up the
3295	     SSA_NAME_DEF_STMT on it, and then put it in place of
3296	     the old statement before the store in the IR stream
3297	     as a plain ssa name copy.  */
3298	  bsi = bsi_for_stmt (stmt);
3299	  bsi_prev (&bsi);
3300	  newstmt = build2 (MODIFY_EXPR, void_type_node,
3301			    TREE_OPERAND (stmt, 0),
3302			    TREE_OPERAND (bsi_stmt (bsi), 1));
3303	  SSA_NAME_DEF_STMT (TREE_OPERAND (newstmt, 0)) = newstmt;
3304	  bsi_insert_before (&bsi, newstmt, BSI_SAME_STMT);
3305	  bsi = bsi_for_stmt (stmt);
3306	  bsi_remove (&bsi, true);
3307	}
3308      else
3309	release_defs (stmt);
3310    }
3311}
3312
3313/* Tree-combine a value number expression *EXPR_P that does a type
3314   conversion with the value number expression of its operand.
3315   Returns true if *EXPR_P simplifies to a value number or
3316   gimple min-invariant expression different from EXPR_P and
3317   sets *EXPR_P to the simplified expression value number.
3318   Otherwise returns false and does not change *EXPR_P.  */
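/* A hedged example (value handle and SSA names invented): if *EXPR_P
   is (int) VH.1 and the expression set of VH.1 has head
   (unsigned int) z_2 with z_2 of type int, fold_unary may collapse the
   sign-change round trip so that, after stripping useless conversions,
   we are left with z_2; if z_2 already has a value number, *EXPR_P is
   rewritten to that value.  */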
3319
3320static bool
3321try_combine_conversion (tree *expr_p)
3322{
3323  tree expr = *expr_p;
3324  tree t;
3325
3326  if (!((TREE_CODE (expr) == NOP_EXPR
3327	 || TREE_CODE (expr) == CONVERT_EXPR)
3328	&& TREE_CODE (TREE_OPERAND (expr, 0)) == VALUE_HANDLE
3329	&& !VALUE_HANDLE_VUSES (TREE_OPERAND (expr, 0))))
3330    return false;
3331
3332  t = fold_unary (TREE_CODE (expr), TREE_TYPE (expr),
3333		  VALUE_HANDLE_EXPR_SET (TREE_OPERAND (expr, 0))->head->expr);
3334  if (!t)
3335    return false;
3336
3337  /* Strip useless type conversions, which is safe in the optimizers but
3338     not generally in fold.  */
3339  STRIP_USELESS_TYPE_CONVERSION (t);
3340
3341  /* Disallow value expressions we have no value number for already, as
3342     we would miss a leader for them here.  */
3343  if (!(TREE_CODE (t) == VALUE_HANDLE
3344	|| is_gimple_min_invariant (t)))
3345    t = vn_lookup (t, NULL);
3346
3347  if (t && t != expr)
3348    {
3349      *expr_p = t;
3350      return true;
3351    }
3352  return false;
3353}
3354
3355/* Compute the AVAIL set for all basic blocks.
3356
3357   This function performs value numbering of the statements in each basic
3358   block.  The AVAIL sets are built from information we glean while doing
3359   this value numbering, since the AVAIL sets contain only one entry per
3360   value.
3361
3362   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3363   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
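/* A small illustration of these equations (block numbers invented): if
   block 7 is immediately dominated by block 4, we start AVAIL_OUT (7)
   as a copy of AVAIL_OUT (4); each real PHI result in block 7 is then
   added to PHI_GEN (7) and AVAIL_OUT (7), and each LHS we value number
   in block 7 is added to TMP_GEN (7) and AVAIL_OUT (7).  */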
3364
3365static void
3366compute_avail (void)
3367{
3368  basic_block block, son;
3369  basic_block *worklist;
3370  size_t sp = 0;
3371  tree param;
3372  /* For arguments with default definitions, we pretend they are
3373     defined in the entry block.  */
3374  for (param = DECL_ARGUMENTS (current_function_decl);
3375       param;
3376       param = TREE_CHAIN (param))
3377    {
3378      if (default_def (param) != NULL)
3379	{
3380	  tree def = default_def (param);
3381	  vn_lookup_or_add (def, NULL);
3382	  bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3383	  bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3384	}
3385    }
3386
3387  /* Likewise for the static chain decl. */
3388  if (cfun->static_chain_decl)
3389    {
3390      param = cfun->static_chain_decl;
3391      if (default_def (param) != NULL)
3392        {
3393          tree def = default_def (param);
3394          vn_lookup_or_add (def, NULL);
3395          bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
3396          bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
3397        }
3398    }
3399
3400  /* Allocate the worklist.  */
3401  worklist = XNEWVEC (basic_block, n_basic_blocks);
3402
3403  /* Seed the algorithm by putting the dominator children of the entry
3404     block on the worklist.  */
3405  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3406       son;
3407       son = next_dom_son (CDI_DOMINATORS, son))
3408    worklist[sp++] = son;
3409
3410  /* Loop until the worklist is empty.  */
3411  while (sp)
3412    {
3413      block_stmt_iterator bsi;
3414      tree stmt, phi;
3415      basic_block dom;
3416      unsigned int stmt_uid = 1;
3417
3418      /* Pick a block from the worklist.  */
3419      block = worklist[--sp];
3420
3421      /* Initially, the set of available values in BLOCK is that of
3422	 its immediate dominator.  */
3423      dom = get_immediate_dominator (CDI_DOMINATORS, block);
3424      if (dom)
3425	bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3426
3427      if (!in_fre)
3428	insert_extra_phis (block, dom);
3429
3430      /* Generate values for PHI nodes.  */
3431      for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
3432	/* We have no need for virtual phis, as they don't represent
3433	   actual computations.  */
3434	if (is_gimple_reg (PHI_RESULT (phi)))
3435	  add_to_sets (PHI_RESULT (phi), PHI_RESULT (phi), NULL,
3436		       PHI_GEN (block), AVAIL_OUT (block));
3437
3438      /* Now compute value numbers and populate value sets with all
3439	 the expressions computed in BLOCK.  */
3440      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
3441	{
3442	  stmt_ann_t ann;
3443	  ssa_op_iter iter;
3444	  tree op;
3445
3446	  stmt = bsi_stmt (bsi);
3447	  ann = stmt_ann (stmt);
3448
3449	  ann->uid = stmt_uid++;
3450
3451	  /* For regular value numbering, we are only interested in
3452	     assignments of the form X_i = EXPR, where EXPR represents
3453	     an "interesting" computation, the statement has no volatile
3454	     operands, and X_i doesn't flow through an abnormal edge.  */
3455	  if (TREE_CODE (stmt) == MODIFY_EXPR
3456	      && !ann->has_volatile_ops
3457	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
3458	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (stmt, 0)))
3459	    {
3460	      tree lhs = TREE_OPERAND (stmt, 0);
3461	      tree rhs = TREE_OPERAND (stmt, 1);
3462
3463	      /* Try to look through loads.  */
3464	      if (TREE_CODE (lhs) == SSA_NAME
3465		  && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
3466		  && try_look_through_load (lhs, rhs, stmt, block))
3467		continue;
3468
3469	      STRIP_USELESS_TYPE_CONVERSION (rhs);
3470	      if (can_value_number_operation (rhs))
3471		{
3472		  /* For a value-numberable operation, create a
3473		     duplicate expression with the operands replaced
3474		     with the value handles of the original RHS.  */
3475		  tree newt = create_value_expr_from (rhs, block, stmt);
3476		  if (newt)
3477		    {
3478		      /* If we can combine a conversion expression
3479			 with the expression for its operand just
3480			 record the value number for it.  */
3481		      if (try_combine_conversion (&newt))
3482			vn_add (lhs, newt);
3483		      else
3484			{
3485			  tree val = vn_lookup_or_add (newt, stmt);
3486			  vn_add (lhs, val);
3487			  value_insert_into_set (EXP_GEN (block), newt);
3488			}
3489		      bitmap_insert_into_set (TMP_GEN (block), lhs);
3490		      bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
3491		      continue;
3492		    }
3493		}
3494	      else if ((TREE_CODE (rhs) == SSA_NAME
3495			&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
3496		       || is_gimple_min_invariant (rhs)
3497		       || TREE_CODE (rhs) == ADDR_EXPR
3498		       || TREE_INVARIANT (rhs)
3499		       || DECL_P (rhs))
3500		{
3501		  /* Compute a value number for the RHS of the statement
3502		     and add its value to the AVAIL_OUT set for the block.
3503		     Add the LHS to TMP_GEN.  */
3504		  add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
3505			       AVAIL_OUT (block));
3506
3507		  if (TREE_CODE (rhs) == SSA_NAME
3508		      && !is_undefined_value (rhs))
3509		    value_insert_into_set (EXP_GEN (block), rhs);
3510		  continue;
3511		}
3512	    }
3513
3514	  /* For any other statement that we don't recognize, simply
3515	     make the names generated by the statement available in
3516	     AVAIL_OUT and TMP_GEN.  */
3517	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3518	    add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));
3519
3520	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3521	    add_to_sets (op, op, NULL, NULL, AVAIL_OUT (block));
3522	}
3523
3524      /* Put the dominator children of BLOCK on the worklist of blocks
3525	 to compute available sets for.  */
3526      for (son = first_dom_son (CDI_DOMINATORS, block);
3527	   son;
3528	   son = next_dom_son (CDI_DOMINATORS, son))
3529	worklist[sp++] = son;
3530    }
3531
3532  free (worklist);
3533}
3534
3535
3536/* Eliminate fully redundant computations.  */
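/* Purely illustrative (hypothetical GIMPLE): if AVAIL_OUT records that the
   value of a_2 + b_3 is already available in x_1, then

     y_4 = a_2 + b_3;

   is rewritten into

     y_4 = x_1;

   by looking up the value of the LHS and substituting the leader.  */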
3537
3538static void
3539eliminate (void)
3540{
3541  basic_block b;
3542
3543  FOR_EACH_BB (b)
3544    {
3545      block_stmt_iterator i;
3546
3547      for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
3548        {
3549          tree stmt = bsi_stmt (i);
3550
3551	  /* Look up the RHS of the expression and see if we have an
3552	     available computation for it.  If so, replace the RHS with
3553	     the available computation.  */
3554	  if (TREE_CODE (stmt) == MODIFY_EXPR
3555	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
3556	      && TREE_CODE (TREE_OPERAND (stmt, 1)) != SSA_NAME
3557	      && !is_gimple_min_invariant (TREE_OPERAND (stmt, 1))
3558	      && !stmt_ann (stmt)->has_volatile_ops)
3559	    {
3560	      tree lhs = TREE_OPERAND (stmt, 0);
3561	      tree *rhs_p = &TREE_OPERAND (stmt, 1);
3562	      tree sprime;
3563
3564	      sprime = bitmap_find_leader (AVAIL_OUT (b),
3565					   vn_lookup (lhs, NULL));
3566	      if (sprime
3567		  && sprime != lhs
3568		  && (TREE_CODE (*rhs_p) != SSA_NAME
3569		      || may_propagate_copy (*rhs_p, sprime)))
3570		{
3571		  gcc_assert (sprime != *rhs_p);
3572
3573		  if (dump_file && (dump_flags & TDF_DETAILS))
3574		    {
3575		      fprintf (dump_file, "Replaced ");
3576		      print_generic_expr (dump_file, *rhs_p, 0);
3577		      fprintf (dump_file, " with ");
3578		      print_generic_expr (dump_file, sprime, 0);
3579		      fprintf (dump_file, " in ");
3580		      print_generic_stmt (dump_file, stmt, 0);
3581		    }
3582
3583		  if (TREE_CODE (sprime) == SSA_NAME)
3584		    NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
3585		  /* We need to make sure the new and old types actually match,
3586		     which may require adding a simple cast; fold_convert
3587		     will do that for us.  */
3588		  if (TREE_CODE (*rhs_p) != SSA_NAME
3589		      && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
3590							      TREE_TYPE (sprime)))
3591		    sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
3592
3593		  pre_stats.eliminations++;
3594		  propagate_tree_value (rhs_p, sprime);
3595		  update_stmt (stmt);
3596
3597		  /* If we removed EH side effects from the statement, clean
3598		     its EH information.  */
3599		  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
3600		    {
3601		      bitmap_set_bit (need_eh_cleanup,
3602				      bb_for_stmt (stmt)->index);
3603		      if (dump_file && (dump_flags & TDF_DETAILS))
3604			fprintf (dump_file, "  Removed EH side effects.\n");
3605		    }
3606		}
3607	    }
3608        }
3609    }
3610}
3611
3612/* Borrow a bit of tree-ssa-dce.c for the moment.
3613   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3614   this may be a bit faster, and we may want critical edges kept split.  */
3615
3616/* If OP's defining statement has not already been determined to be necessary,
3617   mark that statement necessary.  Return the stmt if it is newly
3618   necessary, otherwise NULL.  */
3619
3620static inline tree
3621mark_operand_necessary (tree op)
3622{
3623  tree stmt;
3624
3625  gcc_assert (op);
3626
3627  if (TREE_CODE (op) != SSA_NAME)
3628    return NULL;
3629
3630  stmt = SSA_NAME_DEF_STMT (op);
3631  gcc_assert (stmt);
3632
3633  if (NECESSARY (stmt)
3634      || IS_EMPTY_STMT (stmt))
3635    return NULL;
3636
3637  NECESSARY (stmt) = 1;
3638  return stmt;
3639}
3640
3641/* Because we don't follow the standard PRE algorithm exactly (we sometimes
3642   decide not to insert PHI nodes), and because value numbering of casts isn't
3643   perfect, we sometimes end up inserting dead code.  This simple DCE-like
3644   pass removes any insertions we made that weren't actually used.  */
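/* Illustrative (hypothetical) example: if insert () created

     pretmp.6_9 = a_2 + b_3;

   but no surviving statement ever uses pretmp.6_9, it is never marked
   NECESSARY and gets removed again here.  */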
3645
3646static void
3647remove_dead_inserted_code (void)
3648{
3649  VEC(tree,heap) *worklist = NULL;
3650  int i;
3651  tree t;
3652
3653  worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
3654  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3655    {
3656      if (NECESSARY (t))
3657	VEC_quick_push (tree, worklist, t);
3658    }
3659  while (VEC_length (tree, worklist) > 0)
3660    {
3661      t = VEC_pop (tree, worklist);
3662
3663      /* PHI nodes are somewhat special in that each PHI alternative has
3664	 data and control dependencies.  All the statements feeding the
3665	 PHI node's arguments are always necessary. */
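      /* E.g. (hypothetical) for x_4 = PHI <a_2(3), b_5(4)>, the statements
	 defining a_2 and b_5 are marked necessary below.  */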
3666      if (TREE_CODE (t) == PHI_NODE)
3667	{
3668	  int k;
3669
3670	  VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
3671	  for (k = 0; k < PHI_NUM_ARGS (t); k++)
3672            {
3673	      tree arg = PHI_ARG_DEF (t, k);
3674	      if (TREE_CODE (arg) == SSA_NAME)
3675		{
3676		  arg = mark_operand_necessary (arg);
3677		  if (arg)
3678		    VEC_quick_push (tree, worklist, arg);
3679		}
3680	    }
3681	}
3682      else
3683	{
3684	  /* Propagate through the operands.  Examine all the USE, VUSE and
3685	     V_MAY_DEF operands in this statement.  Mark all the statements
3686	     which feed this statement's uses as necessary.  */
3687	  ssa_op_iter iter;
3688	  tree use;
3689
3690	  /* The operands of V_MAY_DEF expressions are also needed as they
3691	     represent potential definitions that may reach this
3692	     statement (V_MAY_DEF operands allow us to follow def-def
3693	     links).  */
3694
3695	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
3696	    {
3697	      tree n = mark_operand_necessary (use);
3698	      if (n)
3699		VEC_safe_push (tree, heap, worklist, n);
3700	    }
3701	}
3702    }
3703
3704  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
3705    {
3706      if (!NECESSARY (t))
3707	{
3708	  block_stmt_iterator bsi;
3709
3710	  if (dump_file && (dump_flags & TDF_DETAILS))
3711	    {
3712	      fprintf (dump_file, "Removing unnecessary insertion:");
3713	      print_generic_stmt (dump_file, t, 0);
3714	    }
3715
3716	  if (TREE_CODE (t) == PHI_NODE)
3717	    {
3718	      remove_phi_node (t, NULL);
3719	    }
3720	  else
3721	    {
3722	      bsi = bsi_for_stmt (t);
3723	      bsi_remove (&bsi, true);
3724	      release_defs (t);
3725	    }
3726	}
3727    }
3728  VEC_free (tree, heap, worklist);
3729}
3730
3731/* Initialize data structures used by PRE.  */
3732
3733static void
3734init_pre (bool do_fre)
3735{
3736  basic_block bb;
3737
3738  in_fre = do_fre;
3739
3740  inserted_exprs = NULL;
3741  need_creation = NULL;
3742  pretemp = NULL_TREE;
3743  storetemp = NULL_TREE;
3744  mergephitemp = NULL_TREE;
3745  prephitemp = NULL_TREE;
3746
3747  vn_init ();
3748  if (!do_fre)
3749    current_loops = loop_optimizer_init (LOOPS_NORMAL);
3750
3751  connect_infinite_loops_to_exit ();
3752  memset (&pre_stats, 0, sizeof (pre_stats));
3753
3754  /* If block 0 has more than one predecessor, it means that its PHI
3755     nodes will have arguments coming from block -1.  This creates
3756     problems for several places in PRE that keep local arrays indexed
3757     by block number.  To prevent this, we split the edge coming from
3758     ENTRY_BLOCK_PTR (FIXME: if ENTRY_BLOCK_PTR had an index number
3759     different from -1 we wouldn't have to hack this; tree-ssa-dce.c
3760     needs a similar change).  */
3761  if (!single_pred_p (single_succ (ENTRY_BLOCK_PTR)))
3762    if (!(single_succ_edge (ENTRY_BLOCK_PTR)->flags & EDGE_ABNORMAL))
3763      split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
3764
3765  FOR_ALL_BB (bb)
3766    bb->aux = xcalloc (1, sizeof (struct bb_value_sets));
3767
3768  bitmap_obstack_initialize (&grand_bitmap_obstack);
3769  phi_translate_table = htab_create (511, expr_pred_trans_hash,
3770				     expr_pred_trans_eq, free);
3771  value_set_pool = create_alloc_pool ("Value sets",
3772				      sizeof (struct value_set), 30);
3773  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
3774				       sizeof (struct bitmap_set), 30);
3775  value_set_node_pool = create_alloc_pool ("Value set nodes",
3776				           sizeof (struct value_set_node), 30);
3777  calculate_dominance_info (CDI_POST_DOMINATORS);
3778  calculate_dominance_info (CDI_DOMINATORS);
3779  binary_node_pool = create_alloc_pool ("Binary tree nodes",
3780				        tree_code_size (PLUS_EXPR), 30);
3781  unary_node_pool = create_alloc_pool ("Unary tree nodes",
3782				       tree_code_size (NEGATE_EXPR), 30);
3783  reference_node_pool = create_alloc_pool ("Reference tree nodes",
3784					   tree_code_size (ARRAY_REF), 30);
3785  expression_node_pool = create_alloc_pool ("Expression tree nodes",
3786					    tree_code_size (CALL_EXPR), 30);
3787  list_node_pool = create_alloc_pool ("List tree nodes",
3788				      tree_code_size (TREE_LIST), 30);
3789  comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
3790      					    tree_code_size (EQ_EXPR), 30);
3791  modify_expr_node_pool = create_alloc_pool ("MODIFY_EXPR nodes",
3792					     tree_code_size (MODIFY_EXPR),
3793					     30);
3794  modify_expr_template = NULL;
3795
3796  FOR_ALL_BB (bb)
3797    {
3798      EXP_GEN (bb) = set_new (true);
3799      PHI_GEN (bb) = bitmap_set_new ();
3800      TMP_GEN (bb) = bitmap_set_new ();
3801      AVAIL_OUT (bb) = bitmap_set_new ();
3802    }
3803
3804  need_eh_cleanup = BITMAP_ALLOC (NULL);
3805}
3806
3807
3808/* Deallocate data structures used by PRE.  */
3809
3810static void
3811fini_pre (bool do_fre)
3812{
3813  basic_block bb;
3814  unsigned int i;
3815
3816  VEC_free (tree, heap, inserted_exprs);
3817  VEC_free (tree, heap, need_creation);
3818  bitmap_obstack_release (&grand_bitmap_obstack);
3819  free_alloc_pool (value_set_pool);
3820  free_alloc_pool (bitmap_set_pool);
3821  free_alloc_pool (value_set_node_pool);
3822  free_alloc_pool (binary_node_pool);
3823  free_alloc_pool (reference_node_pool);
3824  free_alloc_pool (unary_node_pool);
3825  free_alloc_pool (list_node_pool);
3826  free_alloc_pool (expression_node_pool);
3827  free_alloc_pool (comparison_node_pool);
3828  free_alloc_pool (modify_expr_node_pool);
3829  htab_delete (phi_translate_table);
3830  remove_fake_exit_edges ();
3831
3832  FOR_ALL_BB (bb)
3833    {
3834      free (bb->aux);
3835      bb->aux = NULL;
3836    }
3837
3838  free_dominance_info (CDI_POST_DOMINATORS);
3839  vn_delete ();
3840
3841  if (!bitmap_empty_p (need_eh_cleanup))
3842    {
3843      tree_purge_all_dead_eh_edges (need_eh_cleanup);
3844      cleanup_tree_cfg ();
3845    }
3846
3847  BITMAP_FREE (need_eh_cleanup);
3848
3849  /* Wipe out pointers to VALUE_HANDLEs.  In the not terribly distant
3850     future we will want them to be persistent though.  */
3851  for (i = 0; i < num_ssa_names; i++)
3852    {
3853      tree name = ssa_name (i);
3854
3855      if (!name)
3856	continue;
3857
3858      if (SSA_NAME_VALUE (name)
3859	  && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
3860	SSA_NAME_VALUE (name) = NULL;
3861    }
3862  if (!do_fre && current_loops)
3863    {
3864      loop_optimizer_finalize (current_loops);
3865      current_loops = NULL;
3866    }
3867}
3868
3869/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
3870   only wants to do full redundancy elimination.  */
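/* A sketch of the distinction (hypothetical source-level example): given

     if (c)
       x = a + b;
     ...
     y = a + b;

   the second a + b is only partially redundant, so PRE inserts a + b on the
   path where it was missing and merges the results with a PHI before
   eliminating the second computation; with DO_FRE we only eliminate
   computations that are fully redundant on every incoming path.  */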
3871
3872static void
3873execute_pre (bool do_fre)
3874{
3875  init_pre (do_fre);
3876
3877  if (!do_fre)
3878    insert_fake_stores ();
3879
3880  /* Collect and value number expressions computed in each basic block.  */
3881  compute_avail ();
3882
3883  if (dump_file && (dump_flags & TDF_DETAILS))
3884    {
3885      basic_block bb;
3886
3887      FOR_ALL_BB (bb)
3888	{
3889	  print_value_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
3890	  bitmap_print_value_set (dump_file, TMP_GEN (bb), "tmp_gen",
3891				  bb->index);
3892	  bitmap_print_value_set (dump_file, AVAIL_OUT (bb), "avail_out",
3893				  bb->index);
3894	}
3895    }
3896
3897  /* Insert can get quite slow on an incredibly large number of basic
3898     blocks due to some quadratic behavior.  Until this behavior is
3899     fixed, don't run it when we have an incredibly large number of
3900     bb's.  If we aren't going to run insert, there is no point in
3901     computing ANTIC, either, even though it's plenty fast.  */
3902  if (!do_fre && n_basic_blocks < 4000)
3903    {
3904      vuse_names = XCNEWVEC (bitmap, num_ssa_names);
3905      compute_rvuse_and_antic_safe ();
3906      compute_antic ();
3907      insert ();
3908      free (vuse_names);
3909    }
3910
3911  /* Remove all the redundant expressions.  */
3912  eliminate ();
3913
3914
3915  if (dump_file && (dump_flags & TDF_STATS))
3916    {
3917      fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
3918      fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
3919      fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
3920      fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
3921    }
3922
3923  bsi_commit_edge_inserts ();
3924
3925  if (!do_fre)
3926    {
3927      remove_dead_inserted_code ();
3928      realify_fake_stores ();
3929    }
3930
3931  fini_pre (do_fre);
3932
3933}
3934
3935/* Gate and execute functions for PRE.  */
3936
3937static unsigned int
3938do_pre (void)
3939{
3940  execute_pre (false);
3941  return 0;
3942}
3943
3944static bool
3945gate_pre (void)
3946{
3947  return flag_tree_pre != 0;
3948}
3949
3950struct tree_opt_pass pass_pre =
3951{
3952  "pre",				/* name */
3953  gate_pre,				/* gate */
3954  do_pre,				/* execute */
3955  NULL,					/* sub */
3956  NULL,					/* next */
3957  0,					/* static_pass_number */
3958  TV_TREE_PRE,				/* tv_id */
3959  PROP_no_crit_edges | PROP_cfg
3960    | PROP_ssa | PROP_alias,		/* properties_required */
3961  0,					/* properties_provided */
3962  0,					/* properties_destroyed */
3963  0,					/* todo_flags_start */
3964  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
3965  | TODO_verify_ssa, /* todo_flags_finish */
3966  0					/* letter */
3967};
3968
3969
3970/* Gate and execute functions for FRE.  */
3971
3972static unsigned int
3973execute_fre (void)
3974{
3975  execute_pre (true);
3976  return 0;
3977}
3978
3979static bool
3980gate_fre (void)
3981{
3982  return flag_tree_fre != 0;
3983}
3984
3985struct tree_opt_pass pass_fre =
3986{
3987  "fre",				/* name */
3988  gate_fre,				/* gate */
3989  execute_fre,				/* execute */
3990  NULL,					/* sub */
3991  NULL,					/* next */
3992  0,					/* static_pass_number */
3993  TV_TREE_FRE,				/* tv_id */
3994  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
3995  0,					/* properties_provided */
3996  0,					/* properties_destroyed */
3997  0,					/* todo_flags_start */
3998  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
3999  0					/* letter */
4000};
4001