tree-ssa-pre.c revision 267654
1/* SSA-PRE for trees.
2   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4   <stevenb@suse.de>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to
20the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21Boston, MA 02110-1301, USA.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "ggc.h"
28#include "tree.h"
29#include "basic-block.h"
30#include "diagnostic.h"
31#include "tree-inline.h"
32#include "tree-flow.h"
33#include "tree-gimple.h"
34#include "tree-dump.h"
35#include "timevar.h"
36#include "fibheap.h"
37#include "hashtab.h"
38#include "tree-iterator.h"
39#include "real.h"
40#include "alloc-pool.h"
41#include "tree-pass.h"
42#include "flags.h"
43#include "bitmap.h"
44#include "langhooks.h"
45#include "cfgloop.h"
46
47/* TODO:
48
49   1. Avail sets can be shared by making an avail_find_leader that
50      walks up the dominator tree and looks in those avail sets.
51      This might affect code optimality, it's unclear right now.
52   2. Strength reduction can be performed by anticipating expressions
53      we can repair later on.
54   3. We can do back-substitution or smarter value numbering to catch
55      commutative expressions split up over multiple statements.
56   4. ANTIC_SAFE_LOADS could be a lot smarter than it is now.
57      Right now, it is simply calculating loads that occur before
58      any store in a block, instead of loads that occur before
59      stores that affect them.  This is relatively more expensive, and
60      it's not clear how much more it will buy us.
61*/
62
63/* For ease of terminology, "expression node" in the below refers to
64   every expression node but MODIFY_EXPR, because MODIFY_EXPR's represent
65   the actual statement containing the expressions we care about, and
66   we cache the value number by putting it in the expression.  */
67
68/* Basic algorithm
69
70   First we walk the statements to generate the AVAIL sets, the
71   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
72   generation of values/expressions by a given block.  We use them
73   when computing the ANTIC sets.  The AVAIL sets consist of
74   SSA_NAME's that represent values, so we know what values are
75   available in what blocks.  AVAIL is a forward dataflow problem.  In
76   SSA, values are never killed, so we don't need a kill set, or a
77   fixpoint iteration, in order to calculate the AVAIL sets.  In
78   traditional parlance, AVAIL sets tell us the downsafety of the
79   expressions/values.
80
81   Next, we generate the ANTIC sets.  These sets represent the
82   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
84   be generated in that block.  This means that if we had to perform
85   an insertion in that block, of the value of that expression, we
86   could.  Calculating the ANTIC sets requires phi translation of
87   expressions, because the flow goes backwards through phis.  We must
88   iterate to a fixpoint of the ANTIC sets, because we have a kill
89   set.  Even in SSA form, values are not live over the entire
90   function, only from their definition point onwards.  So we have to
91   remove values from the ANTIC set once we go past the definition
92   point of the leaders that make them up.
93   compute_antic/compute_antic_aux performs this computation.
94
95   Third, we perform insertions to make partially redundant
96   expressions fully redundant.
97
98   An expression is partially redundant (excluding partial
99   anticipation) if:
100
101   1. It is AVAIL in some, but not all, of the predecessors of a
102      given block.
103   2. It is ANTIC in all the predecessors.
104
105   In order to make it fully redundant, we insert the expression into
106   the predecessors where it is not available, but is ANTIC.
107   insert/insert_aux performs this insertion.
108
109   Fourth, we eliminate fully redundant expressions.
110   This is a simple statement walk that replaces redundant
111   calculations  with the now available values.  */
112
113/* Representations of value numbers:
114
115   Value numbers are represented using the "value handle" approach.
116   This means that each SSA_NAME (and for other reasons to be
117   disclosed in a moment, expression nodes) has a value handle that
118   can be retrieved through get_value_handle.  This value handle, *is*
119   the value number of the SSA_NAME.  You can pointer compare the
120   value handles for equivalence purposes.
121
122   For debugging reasons, the value handle is internally more than
123   just a number, it is a VAR_DECL named "value.x", where x is a
124   unique number for each value number in use.  This allows
125   expressions with SSA_NAMES replaced by value handles to still be
126   pretty printed in a sane way.  They simply print as "value.3 *
127   value.5", etc.
128
129   Expression nodes have value handles associated with them as a
130   cache.  Otherwise, we'd have to look them up again in the hash
   table.  This makes a significant difference (factor of two or more) on
132   some test cases.  They can be thrown away after the pass is
133   finished.  */
134
135/* Representation of expressions on value numbers:
136
137   In some portions of this code, you will notice we allocate "fake"
138   analogues to the expression we are value numbering, and replace the
139   operands with the values of the expression.  Since we work on
140   values, and not just names, we canonicalize expressions to value
141   expressions for use in the ANTIC sets, the EXP_GEN set, etc.
142
143   This is theoretically unnecessary, it just saves a bunch of
144   repeated get_value_handle and find_leader calls in the remainder of
145   the code, trading off temporary memory usage for speed.  The tree
146   nodes aren't actually creating more garbage, since they are
147   allocated in a special pools which are thrown away at the end of
148   this pass.
149
150   All of this also means that if you print the EXP_GEN or ANTIC sets,
151   you will see "value.5 + value.7" in the set, instead of "a_55 +
152   b_66" or something.  The only thing that actually cares about
153   seeing the value leaders is phi translation, and it needs to be
154   able to find the leader for a value in an arbitrary block, so this
155   "value expression" form is perfect for it (otherwise you'd do
156   get_value_handle->find_leader->translate->get_value_handle->find_leader).*/
157
158
159/* Representation of sets:
160
161   There are currently two types of sets used, hopefully to be unified soon.
162   The AVAIL sets do not need to be sorted in any particular order,
163   and thus, are simply represented as two bitmaps, one that keeps
164   track of values present in the set, and one that keeps track of
165   expressions present in the set.
166
167   The other sets are represented as doubly linked lists kept in topological
168   order, with an optional supporting bitmap of values present in the
169   set.  The sets represent values, and the elements can be values or
170   expressions.  The elements can appear in different sets, but each
171   element can only appear once in each set.
172
173   Since each node in the set represents a value, we also want to be
174   able to map expression, set pairs to something that tells us
   whether the value is present in a set.  We use a per-set bitmap for
176   that.  The value handles also point to a linked list of the
177   expressions they represent via a tree annotation.  This is mainly
178   useful only for debugging, since we don't do identity lookups.  */
179
180
/* NOTE(review): presumably true when this pass runs as FRE (full
   redundancy elimination) rather than full PRE; the assignments are
   outside this chunk -- confirm against the pass entry points.  */
static bool in_fre = false;
182
/* A value set element.  Basically a singly linked list of
   expressions/values.  */
typedef struct value_set_node
{
  /* An expression.  */
  tree expr;

  /* A pointer to the next element of the value set.  */
  struct value_set_node *next;
} *value_set_node_t;
193
194
/* A value set.  This is a singly linked list of value_set_node
   elements with a possible bitmap that tells us what values exist in
   the set.  This set must be kept in topologically sorted order.  */
typedef struct value_set
{
  /* The head of the list.  Used for iterating over the list in
     order.  */
  value_set_node_t head;

  /* The tail of the list.  Used for tail insertions, which are
     necessary to keep the set in topologically sorted order because
     of how the set is built.  */
  value_set_node_t tail;

  /* The length of the list.  */
  size_t length;

  /* True if the set is indexed, which means it contains a backing
     bitmap for quick determination of whether certain values exist in the
     set.  */
  bool indexed;

  /* The bitmap of values that exist in the set.  May be NULL in an
     empty or non-indexed set.  Allocated lazily on first insertion.  */
  bitmap values;

} *value_set_t;
222
223
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  /* Expressions in the set, indexed by SSA_NAME_VERSION.  */
  bitmap expressions;
  /* Values in the set, indexed by VALUE_HANDLE_ID.  */
  bitmap values;
} *bitmap_set_t;
231
/* Sets that we need to keep track of, one instance per basic block,
   hung off BB->aux and accessed through the macros below.  */
typedef struct bb_value_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  value_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block. IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  value_set_t antic_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* The RVUSE sets, which are used during ANTIC computation to ensure
     that we don't mark loads ANTIC once they have died.  These are the
     classic IN/OUT/GEN/KILL quadruple of a dataflow problem.  */
  bitmap rvuse_in;
  bitmap rvuse_out;
  bitmap rvuse_gen;
  bitmap rvuse_kill;

  /* For actually occurring loads, as long as they occur before all the
     other stores in the block, we know they are antic at the top of
     the block, regardless of RVUSE_KILL.  */
  value_set_t antic_safe_loads;
} *bb_value_sets_t;
272
/* Accessors for the per-basic-block value sets stored in BB->aux.  */
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define RVUSE_IN(BB)    ((bb_value_sets_t) ((BB)->aux))->rvuse_in
#define RVUSE_GEN(BB)   ((bb_value_sets_t) ((BB)->aux))->rvuse_gen
#define RVUSE_KILL(BB)   ((bb_value_sets_t) ((BB)->aux))->rvuse_kill
#define RVUSE_OUT(BB)    ((bb_value_sets_t) ((BB)->aux))->rvuse_out
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define ANTIC_SAFE_LOADS(BB) ((bb_value_sets_t) ((BB)->aux))->antic_safe_loads
284
/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  NOTE(review): presumably
   reset at the start of each invocation and dumped at the end; both
   happen outside this chunk -- confirm.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;
302
303
/* Forward declarations for set routines and expression builders used
   before their definitions.  */
static tree bitmap_find_leader (bitmap_set_t, tree);
static tree find_leader (value_set_t, tree);
static void value_insert_into_set (value_set_t, tree);
static void bitmap_value_insert_into_set (bitmap_set_t, tree);
static void bitmap_value_replace_in_set (bitmap_set_t, tree);
static void insert_into_set (value_set_t, tree);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, tree);
static bitmap_set_t bitmap_set_new (void);
static value_set_t set_new  (bool);
static bool is_undefined_value (tree);
static tree create_expression_by_pieces (basic_block, tree, tree);
static tree find_or_generate_expression (basic_block, tree, tree);
317
318
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  All of these
   pools live only for the duration of the pass.  */

static alloc_pool value_set_pool;
static alloc_pool bitmap_set_pool;
static alloc_pool value_set_node_pool;
static alloc_pool binary_node_pool;
static alloc_pool unary_node_pool;
static alloc_pool reference_node_pool;
static alloc_pool comparison_node_pool;
static alloc_pool expression_node_pool;
static alloc_pool list_node_pool;
static alloc_pool modify_expr_node_pool;
/* Obstack backing every bitmap allocated by this pass.  */
static bitmap_obstack grand_bitmap_obstack;
333
/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree mergephitemp;
static tree prephitemp;

/* Set of blocks with statements that have had its EH information
   cleaned up.  */
static bitmap need_eh_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;
351
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  Entries are keyed on {e, pred, vuses} and
   map to the translated value v.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  tree e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* vuses associated with the expression.  */
  VEC (tree, gc) *vuses;

  /* The value that resulted from the translation.  */
  tree v;


  /* The hashcode for the expression, pred pair. This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
374
375/* Return the hash value for a phi translation table entry.  */
376
377static hashval_t
378expr_pred_trans_hash (const void *p)
379{
380  const expr_pred_trans_t ve = (expr_pred_trans_t) p;
381  return ve->hashcode;
382}
383
/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.
   Two entries match when they are for the same predecessor block,
   have equal expressions, and have element-wise identical vuse
   vectors.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const expr_pred_trans_t ve1 = (expr_pred_trans_t) p1;
  const expr_pred_trans_t ve2 = (expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;
  int i;
  tree vuse1;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;


  /* If they are for the same basic block, determine if the
     expressions are equal.  */
  if (!expressions_equal_p (ve1->e, ve2->e))
    return false;

  /* Make sure the vuses are equivalent.  Pointer equality lets us
     skip the element-wise comparison below.  */
  if (ve1->vuses == ve2->vuses)
    return true;

  /* Different lengths can never compare element-wise equal.  */
  if (VEC_length (tree, ve1->vuses) != VEC_length (tree, ve2->vuses))
    return false;

  /* Same length: the vectors match iff each position holds the same
     SSA name.  */
  for (i = 0; VEC_iterate (tree, ve1->vuses, i, vuse1); i++)
    {
      if (VEC_index (tree, ve2->vuses, i) != vuse1)
	return false;
    }

  return true;
}
423
424/* Search in the phi translation table for the translation of
425   expression E in basic block PRED with vuses VUSES.
426   Return the translated value, if found, NULL otherwise.  */
427
428static inline tree
429phi_trans_lookup (tree e, basic_block pred, VEC (tree, gc) *vuses)
430{
431  void **slot;
432  struct expr_pred_trans_d ept;
433
434  ept.e = e;
435  ept.pred = pred;
436  ept.vuses = vuses;
437  ept.hashcode = vn_compute (e, (unsigned long) pred);
438  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
439				   NO_INSERT);
440  if (!slot)
441    return NULL;
442  else
443    return ((expr_pred_trans_t) *slot)->v;
444}
445
446
/* Add the tuple mapping from {expression E, basic block PRED, vuses VUSES} to
   value V, to the phi translation table.  If the table already has an
   entry in this slot, it is freed and replaced by the new tuple.  */

static inline void
phi_trans_add (tree e, tree v, basic_block pred, VEC (tree, gc) *vuses)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->vuses = vuses;
  new_pair->v = v;
  /* Cache the hash so expr_pred_trans_hash never has to recompute it.  */
  new_pair->hashcode = vn_compute (e, (unsigned long) pred);
  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
				   new_pair->hashcode, INSERT);
  /* Replace (not leak) any entry that previously occupied the slot.  */
  if (*slot)
    free (*slot);
  *slot = (void *) new_pair;
}
466
467
468/* Add expression E to the expression set of value V.  */
469
470void
471add_to_value (tree v, tree e)
472{
473  /* Constants have no expression sets.  */
474  if (is_gimple_min_invariant (v))
475    return;
476
477  if (VALUE_HANDLE_EXPR_SET (v) == NULL)
478    VALUE_HANDLE_EXPR_SET (v) = set_new (false);
479
480  insert_into_set (VALUE_HANDLE_EXPR_SET (v), e);
481}
482
483
484/* Return true if value V exists in the bitmap for SET.  */
485
486static inline bool
487value_exists_in_set_bitmap (value_set_t set, tree v)
488{
489  if (!set->values)
490    return false;
491
492  return bitmap_bit_p (set->values, VALUE_HANDLE_ID (v));
493}
494
495
496/* Remove value V from the bitmap for SET.  */
497
498static void
499value_remove_from_set_bitmap (value_set_t set, tree v)
500{
501  gcc_assert (set->indexed);
502
503  if (!set->values)
504    return;
505
506  bitmap_clear_bit (set->values, VALUE_HANDLE_ID (v));
507}
508
509
510/* Insert the value number V into the bitmap of values existing in
511   SET.  */
512
513static inline void
514value_insert_into_set_bitmap (value_set_t set, tree v)
515{
516  gcc_assert (set->indexed);
517
518  if (set->values == NULL)
519    set->values = BITMAP_ALLOC (&grand_bitmap_obstack);
520
521  bitmap_set_bit (set->values, VALUE_HANDLE_ID (v));
522}
523
524
525/* Create a new bitmap set and return it.  */
526
527static bitmap_set_t
528bitmap_set_new (void)
529{
530  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
531  ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
532  ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
533  return ret;
534}
535
536/* Create a new set.  */
537
538static value_set_t
539set_new  (bool indexed)
540{
541  value_set_t ret;
542  ret = (value_set_t) pool_alloc (value_set_pool);
543  ret->head = ret->tail = NULL;
544  ret->length = 0;
545  ret->indexed = indexed;
546  ret->values = NULL;
547  return ret;
548}
549
550/* Insert an expression EXPR into a bitmapped set.  */
551
552static void
553bitmap_insert_into_set (bitmap_set_t set, tree expr)
554{
555  tree val;
556  /* XXX: For now, we only let SSA_NAMES into the bitmap sets.  */
557  gcc_assert (TREE_CODE (expr) == SSA_NAME);
558  val = get_value_handle (expr);
559
560  gcc_assert (val);
561  if (!is_gimple_min_invariant (val))
562  {
563    bitmap_set_bit (set->values, VALUE_HANDLE_ID (val));
564    bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
565  }
566}
567
568/* Insert EXPR into SET.  */
569
570static void
571insert_into_set (value_set_t set, tree expr)
572{
573  value_set_node_t newnode = (value_set_node_t) pool_alloc (value_set_node_pool);
574  tree val = get_value_handle (expr);
575  gcc_assert (val);
576
577  if (is_gimple_min_invariant (val))
578    return;
579
580  /* For indexed sets, insert the value into the set value bitmap.
581     For all sets, add it to the linked list and increment the list
582     length.  */
583  if (set->indexed)
584    value_insert_into_set_bitmap (set, val);
585
586  newnode->next = NULL;
587  newnode->expr = expr;
588  set->length ++;
589  if (set->head == NULL)
590    {
591      set->head = set->tail = newnode;
592    }
593  else
594    {
595      set->tail->next = newnode;
596      set->tail = newnode;
597    }
598}
599
600/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */
601
602static void
603bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
604{
605  bitmap_copy (dest->expressions, orig->expressions);
606  bitmap_copy (dest->values, orig->values);
607}
608
/* Perform bitmapped set operation DEST &= ORIG.  The values bitmap is
   intersected directly; expressions are then pruned so that only
   those whose value survived the intersection remain.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;
  /* Iterate over a copy of the expressions bitmap, since we clear
     bits in DEST->expressions during the walk.  */
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_and_into (dest->values, orig->values);
  bitmap_copy (temp, dest->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      tree name = ssa_name (i);
      tree val = get_value_handle (name);
      /* Drop any expression whose value fell out of the set.  */
      if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
	bitmap_clear_bit (dest->expressions, i);
    }
  BITMAP_FREE (temp);
}
629
/* Perform bitmapped value set operation DEST = DEST & ~ORIG.  The
   values bitmap is subtracted directly; expressions are then pruned
   so that only those whose value survived remain.  */

static void
bitmap_set_and_compl (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;
  /* Iterate over a copy of the expressions bitmap, since we clear
     bits in DEST->expressions during the walk.  */
  bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);

  bitmap_and_compl_into (dest->values, orig->values);
  bitmap_copy (temp, dest->expressions);
  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
    {
      tree name = ssa_name (i);
      tree val = get_value_handle (name);
      /* Drop any expression whose value was subtracted away.  */
      if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
	bitmap_clear_bit (dest->expressions, i);
    }
  BITMAP_FREE (temp);
}
650
/* Return true if the bitmap set SET is empty.  Emptiness is judged
   by the values bitmap alone.  */

static bool
bitmap_set_empty_p (bitmap_set_t set)
{
  return bitmap_empty_p (set->values);
}
658
659/* Copy the set ORIG to the set DEST.  */
660
661static void
662set_copy (value_set_t dest, value_set_t orig)
663{
664  value_set_node_t node;
665
666  if (!orig || !orig->head)
667    return;
668
669  for (node = orig->head;
670       node;
671       node = node->next)
672    {
673      insert_into_set (dest, node->expr);
674    }
675}
676
/* Remove EXPR from SET.  EXPR must actually be a member of SET: the
   length and value bitmap are updated before the list walk, so
   removing a non-member would leave the set inconsistent.  */

static void
set_remove (value_set_t set, tree expr)
{
  value_set_node_t node, prev;

  /* Remove the value of EXPR from the bitmap, decrement the set
     length, and remove it from the actual singly linked list.  */
  value_remove_from_set_bitmap (set, get_value_handle (expr));
  set->length--;
  prev = NULL;
  for (node = set->head;
       node != NULL;
       prev = node, node = node->next)
    {
      if (node->expr == expr)
	{
	  /* Unlink NODE, fixing up head and tail as needed, and give
	     the node back to its pool.  */
	  if (prev == NULL)
	    set->head = node->next;
	  else
	    prev->next= node->next;

	  if (node == set->tail)
	    set->tail = prev;
	  pool_free (value_set_node_pool, node);
	  return;
	}
    }
}
707
708/* Return true if SET contains the value VAL.  */
709
710static bool
711set_contains_value (value_set_t set, tree val)
712{
713  /* All constants are in every set.  */
714  if (is_gimple_min_invariant (val))
715    return true;
716
717  if (!set || set->length == 0)
718    return false;
719
720  return value_exists_in_set_bitmap (set, val);
721}
722
723/* Return true if bitmapped set SET contains the expression EXPR.  */
724static bool
725bitmap_set_contains (bitmap_set_t set, tree expr)
726{
727  /* All constants are in every set.  */
728  if (is_gimple_min_invariant (get_value_handle (expr)))
729    return true;
730
731  /* XXX: Bitmapped sets only contain SSA_NAME's for now.  */
732  if (TREE_CODE (expr) != SSA_NAME)
733    return false;
734  return bitmap_bit_p (set->expressions, SSA_NAME_VERSION (expr));
735}
736
737
738/* Return true if bitmapped set SET contains the value VAL.  */
739
740static bool
741bitmap_set_contains_value (bitmap_set_t set, tree val)
742{
743  if (is_gimple_min_invariant (val))
744    return true;
745  return bitmap_bit_p (set->values, VALUE_HANDLE_ID (val));
746}
747
/* Replace an instance of value LOOKFOR with expression EXPR in SET.
   Only the first expression found carrying LOOKFOR is swapped out;
   the set is left unchanged if LOOKFOR is constant or absent.  */

static void
bitmap_set_replace_value (bitmap_set_t set, tree lookfor, tree expr)
{
  value_set_t exprset;
  value_set_node_t node;
  if (is_gimple_min_invariant (lookfor))
    return;
  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant lose for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VALUE_HANDLE_EXPR_SET (lookfor);
  for (node = exprset->head; node; node = node->next)
    {
      /* Only SSA names can live in bitmap sets.  */
      if (TREE_CODE (node->expr) == SSA_NAME)
	{
	  if (bitmap_bit_p (set->expressions, SSA_NAME_VERSION (node->expr)))
	    {
	      /* Swap the old expression for EXPR and stop.  */
	      bitmap_clear_bit (set->expressions, SSA_NAME_VERSION (node->expr));
	      bitmap_set_bit (set->expressions, SSA_NAME_VERSION (expr));
	      return;
	    }
	}
    }
}
783
784/* Subtract bitmapped set B from value set A, and return the new set.  */
785
786static value_set_t
787bitmap_set_subtract_from_value_set (value_set_t a, bitmap_set_t b,
788				    bool indexed)
789{
790  value_set_t ret = set_new (indexed);
791  value_set_node_t node;
792  for (node = a->head;
793       node;
794       node = node->next)
795    {
796      if (!bitmap_set_contains (b, node->expr))
797	insert_into_set (ret, node->expr);
798    }
799  return ret;
800}
801
802/* Return true if two sets are equal.  */
803
804static bool
805set_equal (value_set_t a, value_set_t b)
806{
807  value_set_node_t node;
808
809  if (a->length != b->length)
810    return false;
811  for (node = a->head;
812       node;
813       node = node->next)
814    {
815      if (!set_contains_value (b, get_value_handle (node->expr)))
816	return false;
817    }
818  return true;
819}
820
821/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
822   and add it otherwise.  */
823
824static void
825bitmap_value_replace_in_set (bitmap_set_t set, tree expr)
826{
827  tree val = get_value_handle (expr);
828  if (bitmap_set_contains_value (set, val))
829    bitmap_set_replace_value (set, val, expr);
830  else
831    bitmap_insert_into_set (set, expr);
832}
833
834/* Insert EXPR into SET if EXPR's value is not already present in
835   SET.  */
836
837static void
838bitmap_value_insert_into_set (bitmap_set_t set, tree expr)
839{
840  tree val = get_value_handle (expr);
841
842  if (is_gimple_min_invariant (val))
843    return;
844
845  if (!bitmap_set_contains_value (set, val))
846    bitmap_insert_into_set (set, expr);
847}
848
849/* Insert the value for EXPR into SET, if it doesn't exist already.  */
850
851static void
852value_insert_into_set (value_set_t set, tree expr)
853{
854  tree val = get_value_handle (expr);
855
856  /* Constant and invariant values exist everywhere, and thus,
857     actually keeping them in the sets is pointless.  */
858  if (is_gimple_min_invariant (val))
859    return;
860
861  if (!set_contains_value (set, val))
862    insert_into_set (set, expr);
863}
864
865
866/* Print out SET to OUTFILE.  */
867
868static void
869bitmap_print_value_set (FILE *outfile, bitmap_set_t set,
870			const char *setname, int blockindex)
871{
872  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
873  if (set)
874    {
875      bool first = true;
876      unsigned i;
877      bitmap_iterator bi;
878
879      EXECUTE_IF_SET_IN_BITMAP (set->expressions, 0, i, bi)
880	{
881	  if (!first)
882	    fprintf (outfile, ", ");
883	  first = false;
884	  print_generic_expr (outfile, ssa_name (i), 0);
885
886	  fprintf (outfile, " (");
887	  print_generic_expr (outfile, get_value_handle (ssa_name (i)), 0);
888	  fprintf (outfile, ") ");
889	}
890    }
891  fprintf (outfile, " }\n");
892}
893/* Print out the value_set SET to OUTFILE.  */
894
895static void
896print_value_set (FILE *outfile, value_set_t set,
897		 const char *setname, int blockindex)
898{
899  value_set_node_t node;
900  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
901  if (set)
902    {
903      for (node = set->head;
904	   node;
905	   node = node->next)
906	{
907	  print_generic_expr (outfile, node->expr, 0);
908
909	  fprintf (outfile, " (");
910	  print_generic_expr (outfile, get_value_handle (node->expr), 0);
911	  fprintf (outfile, ") ");
912
913	  if (node->next)
914	    fprintf (outfile, ", ");
915	}
916    }
917
918  fprintf (outfile, " }\n");
919}
920
921/* Print out the expressions that have VAL to OUTFILE.  */
922
923void
924print_value_expressions (FILE *outfile, tree val)
925{
926  if (VALUE_HANDLE_EXPR_SET (val))
927    {
928      char s[10];
929      sprintf (s, "VH.%04d", VALUE_HANDLE_ID (val));
930      print_value_set (outfile, VALUE_HANDLE_EXPR_SET (val), s, 0);
931    }
932}
933
934
/* Dump the expressions that have value VAL to stderr; convenient to
   call from a debugger.  */
void
debug_value_expressions (tree val)
{
  print_value_expressions (stderr, val);
}
940
941
/* Dump SET, labeled SETNAME[BLOCKINDEX], to stderr; convenient to
   call from a debugger.  */
void debug_value_set (value_set_t, const char *, int);

void
debug_value_set (value_set_t set, const char *setname, int blockindex)
{
  print_value_set (stderr, set, setname, blockindex);
}
949
950/* Return the folded version of T if T, when folded, is a gimple
951   min_invariant.  Otherwise, return T.  */
952
953static tree
954fully_constant_expression (tree t)
955{
956  tree folded;
957  folded = fold (t);
958  if (folded && is_gimple_min_invariant (folded))
959    return folded;
960  return t;
961}
962
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.
   Allocates the nodes in list_node_pool.  NOTE(review): each node is
   memcpy'd with tree_size of the source, which assumes every node in
   the chain fits in a list_node_pool element -- confirm for chains
   of anything other than TREE_LIST nodes.  */

static tree
pool_copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;
  head = (tree) pool_alloc (list_node_pool);

  /* Copy the first node, then walk the chain duplicating each
     successor and splicing it onto the copy built so far.  */
  memcpy (head, list, tree_size (list));
  prev = head;

  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
      memcpy (TREE_CHAIN (prev), next, tree_size (next));
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
990
/* Translate the vuses in the VUSES vector backwards through phi
   nodes, so that they have the value they would have in BLOCK.
   Returns VUSES itself when no translation was needed; otherwise
   returns a fresh, sorted copy with the translated entries.  */

static VEC(tree, gc) *
translate_vuses_through_block (VEC (tree, gc) *vuses, basic_block block)
{
  tree oldvuse;
  /* The result vector is created lazily (copy-on-write): it stays
     NULL until the first vuse actually changes.  */
  VEC(tree, gc) *result = NULL;
  int i;

  for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
    {
      tree phi = SSA_NAME_DEF_STMT (oldvuse);
      if (TREE_CODE (phi) == PHI_NODE)
	{
	  /* Only translate if BLOCK is actually a predecessor of the
	     phi's block.  */
	  edge e = find_edge (block, bb_for_stmt (phi));
	  if (e)
	    {
	      /* Replace the phi result with its argument coming from
		 BLOCK's edge.  */
	      tree def = PHI_ARG_DEF (phi, e->dest_idx);
	      if (def != oldvuse)
		{
		  if (!result)
		    result = VEC_copy (tree, gc, vuses);
		  VEC_replace (tree, result, i, def);
		}
	    }
	}
    }
  if (result)
    {
      /* Keep the vector in canonical (sorted) order so that vuse
	 vectors can be compared element-wise.  */
      sort_vuses (result);
      return result;
    }
  return vuses;

}
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each
   part of the translated expression.  */

static tree
phi_translate (tree expr, value_set_t set, basic_block pred,
	       basic_block phiblock)
{
  tree phitrans = NULL;
  tree oldexpr = expr;
  if (expr == NULL)
    return NULL;

  /* Constants translate to themselves.  */
  if (is_gimple_min_invariant (expr))
    return expr;

  /* Phi translations of a given expression don't change.  */
  if (EXPR_P (expr))
    {
      tree vh;

      vh = get_value_handle (expr);
      if (vh && TREE_CODE (vh) == VALUE_HANDLE)
	phitrans = phi_trans_lookup (expr, pred, VALUE_HANDLE_VUSES (vh));
      else
	phitrans = phi_trans_lookup (expr, pred, NULL);
    }
  else
    phitrans = phi_trans_lookup (expr, pred, NULL);

  /* Return the cached translation if we computed one before.  */
  if (phitrans)
    return phitrans;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_expression:
      {
	/* CALL_EXPR is the only tcc_expression code handled here;
	   anything else fails to translate.  */
	if (TREE_CODE (expr) != CALL_EXPR)
	  return NULL;
	else
	  {
	    tree oldop0 = TREE_OPERAND (expr, 0);
	    tree oldarglist = TREE_OPERAND (expr, 1);
	    tree oldop2 = TREE_OPERAND (expr, 2);
	    tree newop0;
	    tree newarglist;
	    tree newop2 = NULL;
	    tree oldwalker;
	    tree newwalker;
	    tree newexpr;
	    tree vh = get_value_handle (expr);
	    bool listchanged = false;
	    bool invariantarg = false;
	    VEC (tree, gc) *vuses = VALUE_HANDLE_VUSES (vh);
	    VEC (tree, gc) *tvuses;

	    /* Call expressions are kind of weird because they have an
	       argument list.  We don't want to value number the list
	       as one value number, because that doesn't make much
	       sense, and just breaks the support functions we call,
	       which expect TREE_OPERAND (call_expr, 2) to be a
	       TREE_LIST. */

	    /* Translate operand 0 and, if present, operand 2; each
	       failure to find a leader aborts the whole translation.  */
	    newop0 = phi_translate (find_leader (set, oldop0),
				    set, pred, phiblock);
	    if (newop0 == NULL)
	      return NULL;
	    if (oldop2)
	      {
		newop2 = phi_translate (find_leader (set, oldop2),
					set, pred, phiblock);
		if (newop2 == NULL)
		  return NULL;
	      }

	    /* phi translate the argument list piece by piece.

	      We could actually build the list piece by piece here,
	      but it's likely to not be worth the memory we will save,
	      unless you have millions of call arguments.  */

	    newarglist = pool_copy_list (oldarglist);
	    for (oldwalker = oldarglist, newwalker = newarglist;
		 oldwalker && newwalker;
		 oldwalker = TREE_CHAIN (oldwalker),
		   newwalker = TREE_CHAIN (newwalker))
	      {

		tree oldval = TREE_VALUE (oldwalker);
		tree newval;
		if (oldval)
		  {
		    /* This may seem like a weird place for this
		       check, but it's actually the easiest place to
		       do it.  We can't do it lower on in the
		       recursion because it's valid for pieces of a
		       component ref to be of AGGREGATE_TYPE, as long
		       as the outermost one is not.
		       To avoid *that* case, we have a check for
		       AGGREGATE_TYPE_P in insert_aux.  However, that
		       check will *not* catch this case because here
		       it occurs in the argument list.  */
		    if (AGGREGATE_TYPE_P (TREE_TYPE (oldval)))
		      return NULL;
		    newval = phi_translate (find_leader (set, oldval),
					    set, pred, phiblock);
		    if (newval == NULL)
		      return NULL;
		    if (newval != oldval)
		      {
			listchanged = true;
			invariantarg |= is_gimple_min_invariant (newval);
			TREE_VALUE (newwalker) = get_value_handle (newval);
		      }
		  }
	      }

	    /* In case of new invariant args we might try to fold the call
	       again.  */
	    if (invariantarg)
	      {
		tree tmp = fold_ternary (CALL_EXPR, TREE_TYPE (expr),
					 newop0, newarglist, newop2);
		if (tmp)
		  {
		    STRIP_TYPE_NOPS (tmp);
		    if (is_gimple_min_invariant (tmp))
		      return tmp;
		  }
	      }

	    if (listchanged)
	      vn_lookup_or_add (newarglist, NULL);

	    tvuses = translate_vuses_through_block (vuses, pred);

	    /* If any piece changed, clone the CALL_EXPR from the
	       pool, value number it, and cache the translation.  */
	    if (listchanged || (newop0 != oldop0) || (oldop2 != newop2)
		|| vuses != tvuses)
	      {
		newexpr = (tree) pool_alloc (expression_node_pool);
		memcpy (newexpr, expr, tree_size (expr));
		TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
		TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
		TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
		newexpr->common.ann = NULL;
		vn_lookup_or_add_with_vuses (newexpr, tvuses);
		expr = newexpr;
		phi_trans_add (oldexpr, newexpr, pred, tvuses);
	      }
	  }
      }
      return expr;

    case tcc_declaration:
      {
	/* The declaration node itself is unchanged; only its vuses
	   may need translating through the phis.  */
	VEC (tree, gc) * oldvuses = NULL;
	VEC (tree, gc) * newvuses = NULL;

	oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
	if (oldvuses)
	  newvuses = translate_vuses_through_block (oldvuses, pred);

	if (oldvuses != newvuses)
	  vn_lookup_or_add_with_vuses (expr, newvuses);

	phi_trans_add (oldexpr, expr, pred, newvuses);
      }
      return expr;

    case tcc_reference:
      {
	tree oldop0 = TREE_OPERAND (expr, 0);
	tree oldop1 = NULL;
	tree newop0;
	tree newop1 = NULL;
	tree oldop2 = NULL;
	tree newop2 = NULL;
	tree oldop3 = NULL;
	tree newop3 = NULL;
	tree newexpr;
	VEC (tree, gc) * oldvuses = NULL;
	VEC (tree, gc) * newvuses = NULL;

	/* Only these three reference codes are handled.  */
	if (TREE_CODE (expr) != INDIRECT_REF
	    && TREE_CODE (expr) != COMPONENT_REF
	    && TREE_CODE (expr) != ARRAY_REF)
	  return NULL;

	newop0 = phi_translate (find_leader (set, oldop0),
				set, pred, phiblock);
	if (newop0 == NULL)
	  return NULL;

	if (TREE_CODE (expr) == ARRAY_REF)
	  {
	    /* ARRAY_REFs also carry operand 1 plus optional operands
	       2 and 3, each of which must translate successfully.  */
	    oldop1 = TREE_OPERAND (expr, 1);
	    newop1 = phi_translate (find_leader (set, oldop1),
				    set, pred, phiblock);

	    if (newop1 == NULL)
	      return NULL;
	    oldop2 = TREE_OPERAND (expr, 2);
	    if (oldop2)
	      {
		newop2 = phi_translate (find_leader (set, oldop2),
					set, pred, phiblock);

		if (newop2 == NULL)
		  return NULL;
	      }
	    oldop3 = TREE_OPERAND (expr, 3);
	    if (oldop3)
	      {
		newop3 = phi_translate (find_leader (set, oldop3),
					set, pred, phiblock);

		if (newop3 == NULL)
		  return NULL;
	      }
	  }

	oldvuses = VALUE_HANDLE_VUSES (get_value_handle (expr));
	if (oldvuses)
	  newvuses = translate_vuses_through_block (oldvuses, pred);

	if (newop0 != oldop0 || newvuses != oldvuses
	    || newop1 != oldop1
	    || newop2 != oldop2
	    || newop3 != oldop3)
	  {
	    tree t;

	    newexpr = pool_alloc (reference_node_pool);
	    memcpy (newexpr, expr, tree_size (expr));
	    TREE_OPERAND (newexpr, 0) = get_value_handle (newop0);
	    if (TREE_CODE (expr) == ARRAY_REF)
	      {
		TREE_OPERAND (newexpr, 1) = get_value_handle (newop1);
		if (newop2)
		  TREE_OPERAND (newexpr, 2) = get_value_handle (newop2);
		if (newop3)
		  TREE_OPERAND (newexpr, 3) = get_value_handle (newop3);
	      }

	    /* The translated reference may fold to a constant; if so
	       use the folded form and give the pool node back.  */
	    t = fully_constant_expression (newexpr);

	    if (t != newexpr)
	      {
		pool_free (reference_node_pool, newexpr);
		newexpr = t;
	      }
	    else
	      {
		newexpr->common.ann = NULL;
		vn_lookup_or_add_with_vuses (newexpr, newvuses);
	      }
	    expr = newexpr;
	    phi_trans_add (oldexpr, newexpr, pred, newvuses);
	  }
      }
      return expr;
      break;

    case tcc_binary:
    case tcc_comparison:
      {
	tree oldop1 = TREE_OPERAND (expr, 0);
	tree oldop2 = TREE_OPERAND (expr, 1);
	tree newop1;
	tree newop2;
	tree newexpr;

	/* Translate both operands; fail if either lacks a leader.  */
	newop1 = phi_translate (find_leader (set, oldop1),
				set, pred, phiblock);
	if (newop1 == NULL)
	  return NULL;
	newop2 = phi_translate (find_leader (set, oldop2),
				set, pred, phiblock);
	if (newop2 == NULL)
	  return NULL;
	if (newop1 != oldop1 || newop2 != oldop2)
	  {
	    tree t;
	    newexpr = (tree) pool_alloc (binary_node_pool);
	    memcpy (newexpr, expr, tree_size (expr));
	    TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
	    TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
	    /* Folding may turn the translated expression into a
	       constant, in which case the pool node is released.  */
	    t = fully_constant_expression (newexpr);
	    if (t != newexpr)
	      {
		pool_free (binary_node_pool, newexpr);
		newexpr = t;
	      }
	    else
	      {
		newexpr->common.ann = NULL;
		vn_lookup_or_add (newexpr, NULL);
	      }
	    expr = newexpr;
	    phi_trans_add (oldexpr, newexpr, pred, NULL);
	  }
      }
      return expr;

    case tcc_unary:
      {
	tree oldop1 = TREE_OPERAND (expr, 0);
	tree newop1;
	tree newexpr;

	newop1 = phi_translate (find_leader (set, oldop1),
				set, pred, phiblock);
	if (newop1 == NULL)
	  return NULL;
	if (newop1 != oldop1)
	  {
	    tree t;
	    newexpr = (tree) pool_alloc (unary_node_pool);
	    memcpy (newexpr, expr, tree_size (expr));
	    TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
	    t = fully_constant_expression (newexpr);
	    if (t != newexpr)
	      {
		pool_free (unary_node_pool, newexpr);
		newexpr = t;
	      }
	    else
	      {
		newexpr->common.ann = NULL;
		vn_lookup_or_add (newexpr, NULL);
	      }
	    expr = newexpr;
	    phi_trans_add (oldexpr, newexpr, pred, NULL);
	  }
      }
      return expr;

    case tcc_exceptional:
      {
	/* An SSA_NAME defined by a phi in PHIBLOCK translates to the
	   phi argument on the edge from PRED; any other SSA_NAME
	   translates to itself.  */
	tree phi = NULL;
	edge e;
	gcc_assert (TREE_CODE (expr) == SSA_NAME);
	if (TREE_CODE (SSA_NAME_DEF_STMT (expr)) == PHI_NODE)
	  phi = SSA_NAME_DEF_STMT (expr);
	else
	  return expr;

	e = find_edge (pred, bb_for_stmt (phi));
	if (e)
	  {
	    /* An undefined incoming value cannot be anticipated.  */
	    if (is_undefined_value (PHI_ARG_DEF (phi, e->dest_idx)))
	      return NULL;
	    vn_lookup_or_add (PHI_ARG_DEF (phi, e->dest_idx), NULL);
	    return PHI_ARG_DEF (phi, e->dest_idx);
	  }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}
1389
1390/* For each expression in SET, translate the value handles through phi nodes
1391   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1392   expressions in DEST.  */
1393
1394static void
1395phi_translate_set (value_set_t dest, value_set_t set, basic_block pred,
1396		   basic_block phiblock)
1397{
1398  value_set_node_t node;
1399  for (node = set->head;
1400       node;
1401       node = node->next)
1402    {
1403      tree translated;
1404
1405      translated = phi_translate (node->expr, set, pred, phiblock);
1406
1407      /* Don't add constants or empty translations to the cache, since
1408	 we won't look them up that way, or use the result, anyway.  */
1409      if (translated && !is_gimple_min_invariant (translated))
1410	{
1411	  tree vh = get_value_handle (translated);
1412	  VEC (tree, gc) *vuses;
1413
1414	  /* The value handle itself may also be an invariant, in
1415	     which case, it has no vuses.  */
1416	  vuses = !is_gimple_min_invariant (vh)
1417	    ? VALUE_HANDLE_VUSES (vh) : NULL;
1418	  phi_trans_add (node->expr, translated, pred, vuses);
1419	}
1420
1421      if (translated != NULL)
1422	value_insert_into_set (dest, translated);
1423    }
1424}
1425
1426/* Find the leader for a value (i.e., the name representing that
1427   value) in a given set, and return it.  Return NULL if no leader is
1428   found.  */
1429
1430static tree
1431bitmap_find_leader (bitmap_set_t set, tree val)
1432{
1433  if (val == NULL)
1434    return NULL;
1435
1436  if (is_gimple_min_invariant (val))
1437    return val;
1438  if (bitmap_set_contains_value (set, val))
1439    {
1440      /* Rather than walk the entire bitmap of expressions, and see
1441	 whether any of them has the value we are looking for, we look
1442	 at the reverse mapping, which tells us the set of expressions
1443	 that have a given value (IE value->expressions with that
1444	 value) and see if any of those expressions are in our set.
1445	 The number of expressions per value is usually significantly
1446	 less than the number of expressions in the set.  In fact, for
1447	 large testcases, doing it this way is roughly 5-10x faster
1448	 than walking the bitmap.
1449	 If this is somehow a significant lose for some cases, we can
1450	 choose which set to walk based on which set is smaller.  */
1451      value_set_t exprset;
1452      value_set_node_t node;
1453      exprset = VALUE_HANDLE_EXPR_SET (val);
1454      for (node = exprset->head; node; node = node->next)
1455	{
1456	  if (TREE_CODE (node->expr) == SSA_NAME)
1457	    {
1458	      if (bitmap_bit_p (set->expressions,
1459				SSA_NAME_VERSION (node->expr)))
1460		return node->expr;
1461	    }
1462	}
1463    }
1464  return NULL;
1465}
1466
1467
1468/* Find the leader for a value (i.e., the name representing that
1469   value) in a given set, and return it.  Return NULL if no leader is
1470   found.  */
1471
1472static tree
1473find_leader (value_set_t set, tree val)
1474{
1475  value_set_node_t node;
1476
1477  if (val == NULL)
1478    return NULL;
1479
1480  /* Constants represent themselves.  */
1481  if (is_gimple_min_invariant (val))
1482    return val;
1483
1484  if (set->length == 0)
1485    return NULL;
1486
1487  if (value_exists_in_set_bitmap (set, val))
1488    {
1489      for (node = set->head;
1490	   node;
1491	   node = node->next)
1492	{
1493	  if (get_value_handle (node->expr) == val)
1494	    return node->expr;
1495	}
1496    }
1497
1498  return NULL;
1499}
1500
1501/* Given the vuse representative map, MAP, and an SSA version number,
1502   ID, return the bitmap of names ID represents, or NULL, if none
1503   exists.  */
1504
1505static bitmap
1506get_representative (bitmap *map, int id)
1507{
1508  if (map[id] != NULL)
1509    return map[id];
1510  return NULL;
1511}
1512
1513/* A vuse is anticipable at the top of block x, from the bottom of the
1514   block, if it reaches the top of the block, and is not killed in the
1515   block.  In effect, we are trying to see if the vuse is transparent
1516   backwards in the block.  */
1517
1518static bool
1519vuses_dies_in_block_x (VEC (tree, gc) *vuses, basic_block block)
1520{
1521  int i;
1522  tree vuse;
1523
1524  for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
1525    {
1526      /* Any places where this is too conservative, are places
1527	 where we created a new version and shouldn't have.  */
1528
1529      if (!bitmap_bit_p (RVUSE_IN (block), SSA_NAME_VERSION (vuse))
1530	  || bitmap_bit_p (RVUSE_KILL (block), SSA_NAME_VERSION (vuse)))
1531	return true;
1532    }
1533  return false;
1534}
1535
1536/* Determine if the expression EXPR is valid in SET.  This means that
1537   we have a leader for each part of the expression (if it consists of
1538   values), or the expression is an SSA_NAME.
1539
1540   NB: We never should run into a case where we have SSA_NAME +
1541   SSA_NAME or SSA_NAME + value.  The sets valid_in_set is called on,
1542   the ANTIC sets, will only ever have SSA_NAME's or value expressions
1543   (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2)  */
1544
1545static bool
1546valid_in_set (value_set_t set, tree expr, basic_block block)
1547{
1548 tree vh = get_value_handle (expr);
1549 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1550    {
1551    case tcc_binary:
1552    case tcc_comparison:
1553      {
1554	tree op1 = TREE_OPERAND (expr, 0);
1555	tree op2 = TREE_OPERAND (expr, 1);
1556	return set_contains_value (set, op1) && set_contains_value (set, op2);
1557      }
1558
1559    case tcc_unary:
1560      {
1561	tree op1 = TREE_OPERAND (expr, 0);
1562	return set_contains_value (set, op1);
1563      }
1564
1565    case tcc_expression:
1566      {
1567	if (TREE_CODE (expr) == CALL_EXPR)
1568	  {
1569	    tree op0 = TREE_OPERAND (expr, 0);
1570	    tree arglist = TREE_OPERAND (expr, 1);
1571	    tree op2 = TREE_OPERAND (expr, 2);
1572
1573	    /* Check the non-list operands first.  */
1574	    if (!set_contains_value (set, op0)
1575		|| (op2 && !set_contains_value (set, op2)))
1576	      return false;
1577
1578	    /* Now check the operands.  */
1579	    for (; arglist; arglist = TREE_CHAIN (arglist))
1580	      {
1581		if (!set_contains_value (set, TREE_VALUE (arglist)))
1582		  return false;
1583	      }
1584	    return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1585	  }
1586	return false;
1587      }
1588
1589    case tcc_reference:
1590      {
1591	if (TREE_CODE (expr) == INDIRECT_REF
1592	    || TREE_CODE (expr) == COMPONENT_REF
1593            || TREE_CODE (expr) == ARRAY_REF)
1594	  {
1595	    tree op0 = TREE_OPERAND (expr, 0);
1596	    gcc_assert (is_gimple_min_invariant (op0)
1597			|| TREE_CODE (op0) == VALUE_HANDLE);
1598	    if (!set_contains_value (set, op0))
1599	      return false;
1600	    if (TREE_CODE (expr) == ARRAY_REF)
1601	      {
1602		tree op1 = TREE_OPERAND (expr, 1);
1603		tree op2 = TREE_OPERAND (expr, 2);
1604		tree op3 = TREE_OPERAND (expr, 3);
1605		gcc_assert (is_gimple_min_invariant (op1)
1606		            || TREE_CODE (op1) == VALUE_HANDLE);
1607		if (!set_contains_value (set, op1))
1608		  return false;
1609		gcc_assert (!op2 || is_gimple_min_invariant (op2)
1610		            || TREE_CODE (op2) == VALUE_HANDLE);
1611		if (op2
1612		    && !set_contains_value (set, op2))
1613		  return false;
1614		gcc_assert (!op3 || is_gimple_min_invariant (op3)
1615		            || TREE_CODE (op3) == VALUE_HANDLE);
1616		if (op3
1617		    && !set_contains_value (set, op3))
1618		  return false;
1619	    }
1620	  return set_contains_value (ANTIC_SAFE_LOADS (block),
1621				     vh)
1622	    || !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh),
1623				       block);
1624	  }
1625      }
1626      return false;
1627
1628    case tcc_exceptional:
1629      gcc_assert (TREE_CODE (expr) == SSA_NAME);
1630      return true;
1631
1632    case tcc_declaration:
1633      return !vuses_dies_in_block_x (VALUE_HANDLE_VUSES (vh), block);
1634
1635    default:
1636      /* No other cases should be encountered.  */
1637      gcc_unreachable ();
1638   }
1639}
1640
1641/* Clean the set of expressions that are no longer valid in SET.  This
1642   means expressions that are made up of values we have no leaders for
1643   in SET.  */
1644
1645static void
1646clean (value_set_t set, basic_block block)
1647{
1648  value_set_node_t node;
1649  value_set_node_t next;
1650  node = set->head;
1651  while (node)
1652    {
1653      next = node->next;
1654      if (!valid_in_set (set, node->expr, block))
1655	set_remove (set, node->expr);
1656      node = next;
1657    }
1658}
1659
1660static sbitmap has_abnormal_preds;
1661
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])

   Returns true if any block's ANTIC_IN changed during this walk of
   the postdominator tree rooted at BLOCK.

   XXX: It would be nice to either write a set_clear, and use it for
   ANTIC_OUT, or to mark the antic_out set as deleted at the end
   of this routine, so that the pool can hand the same memory back out
   again for the next ANTIC_OUT.  */

static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  basic_block son;
  bool changed = false;
  value_set_t S, old, ANTIC_OUT;
  value_set_node_t node;

  ANTIC_OUT = S = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* Snapshot the current ANTIC_IN so we can tell below whether this
     pass changed it.  */
  old = set_new (false);
  set_copy (old, ANTIC_IN (block));
  ANTIC_OUT = set_new (true);

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (block))
    {
      phi_translate_set (ANTIC_OUT, ANTIC_IN (single_succ (block)),
			 block, single_succ (block));
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  */
  else
    {
      VEC(basic_block, heap) * worklist;
      edge e;
      size_t i;
      basic_block bprime, first;
      edge_iterator ei;

      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	VEC_quick_push (basic_block, worklist, e->dest);
      first = VEC_index (basic_block, worklist, 0);
      set_copy (ANTIC_OUT, ANTIC_IN (first));

      /* Drop from ANTIC_OUT every value not anticipated by all the
	 remaining successors.  */
      for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
	{
	  node = ANTIC_OUT->head;
	  while (node)
	    {
	      tree val;
	      /* Grab the successor first; set_remove unlinks NODE.  */
	      value_set_node_t next = node->next;

	      val = get_value_handle (node->expr);
	      if (!set_contains_value (ANTIC_IN (bprime), val))
		set_remove (ANTIC_OUT, node->expr);
	      node = next;
	    }
	}
      VEC_free (basic_block, heap, worklist);
    }

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract_from_value_set (ANTIC_OUT, TMP_GEN (block), false);

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN */
  ANTIC_IN (block) = bitmap_set_subtract_from_value_set (EXP_GEN (block),
							 TMP_GEN (block),
							 true);

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN */
  for (node = S->head; node; node = node->next)
    value_insert_into_set (ANTIC_IN (block), node->expr);

  /* Remove expressions we no longer have leaders for, then compare
     against the snapshot to detect a change.  */
  clean (ANTIC_IN (block), block);
  if (!set_equal (old, ANTIC_IN (block)))
    changed = true;

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (ANTIC_OUT)
	print_value_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);

      if (ANTIC_SAFE_LOADS (block))
	print_value_set (dump_file, ANTIC_SAFE_LOADS (block),
			 "ANTIC_SAFE_LOADS", block->index);
      print_value_set (dump_file, ANTIC_IN (block), "ANTIC_IN", block->index);

      if (S)
	print_value_set (dump_file, S, "S", block->index);
    }

  /* Recurse over the postdominator tree so one call from the exit
     block processes the whole function.  */
  for (son = first_dom_son (CDI_POST_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    {
      changed |= compute_antic_aux (son,
				    TEST_BIT (has_abnormal_preds, son->index));
    }
  return changed;
}
1779
1780/* Compute ANTIC sets.  */
1781
1782static void
1783compute_antic (void)
1784{
1785  bool changed = true;
1786  int num_iterations = 0;
1787  basic_block block;
1788
1789  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
1790     We pre-build the map of blocks with incoming abnormal edges here.  */
1791  has_abnormal_preds = sbitmap_alloc (last_basic_block);
1792  sbitmap_zero (has_abnormal_preds);
1793  FOR_EACH_BB (block)
1794    {
1795      edge_iterator ei;
1796      edge e;
1797
1798      FOR_EACH_EDGE (e, ei, block->preds)
1799	if (e->flags & EDGE_ABNORMAL)
1800	  {
1801	    SET_BIT (has_abnormal_preds, block->index);
1802	    break;
1803	  }
1804
1805      /* While we are here, give empty ANTIC_IN sets to each block.  */
1806      ANTIC_IN (block) = set_new (true);
1807    }
1808  /* At the exit block we anticipate nothing.  */
1809  ANTIC_IN (EXIT_BLOCK_PTR) = set_new (true);
1810
1811  while (changed)
1812    {
1813      num_iterations++;
1814      changed = false;
1815      changed = compute_antic_aux (EXIT_BLOCK_PTR, false);
1816    }
1817
1818  sbitmap_free (has_abnormal_preds);
1819
1820  if (dump_file && (dump_flags & TDF_STATS))
1821    fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
1822}
1823
1824/* Print the names represented by the bitmap NAMES, to the file OUT.  */
1825static void
1826dump_bitmap_of_names (FILE *out, bitmap names)
1827{
1828  bitmap_iterator bi;
1829  unsigned int i;
1830
1831  fprintf (out, " { ");
1832  EXECUTE_IF_SET_IN_BITMAP (names, 0, i, bi)
1833    {
1834      print_generic_expr (out, ssa_name (i), 0);
1835      fprintf (out, " ");
1836    }
1837  fprintf (out, "}\n");
1838}
1839
/* Compute a set of representative vuse versions for each phi.  This
   is so we can compute conservative kill sets in terms of all vuses
   that are killed, instead of continually walking chains.

   We also have to be able to kill all names associated with a phi
   when the phi dies in order to ensure we don't generate overlapping
   live ranges, which are not allowed in virtual SSA.  */

/* Map from SSA version number to the bitmap of versions it
   represents; slots are filled lazily by
   compute_vuse_representatives.  */
static bitmap *vuse_names;
static void
compute_vuse_representatives (void)
{
  tree phi;
  basic_block bb;
  VEC (tree, heap) *phis = NULL;
  bool changed = true;
  size_t i;

  /* Collect every virtual phi in the function.  */
  FOR_EACH_BB (bb)
    {
      for (phi = phi_nodes (bb);
	   phi;
	   phi = PHI_CHAIN (phi))
	if (!is_gimple_reg (PHI_RESULT (phi)))
	  VEC_safe_push (tree, heap, phis, phi);
    }

  /* Iterate to a fixed point, folding each phi argument's
     representative set (or the argument's own version) into the phi
     result's set.  */
  while (changed)
    {
      changed = false;

      for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
	{
	  size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
	  use_operand_p usep;
	  ssa_op_iter iter;

	  /* Lazily create the result's bitmap, seeded with its own
	     version.  */
	  if (vuse_names[ver] == NULL)
	    {
	      vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
	      bitmap_set_bit (vuse_names[ver], ver);
	    }
	  FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
	    {
	      tree use = USE_FROM_PTR (usep);
	      bitmap usebitmap = get_representative (vuse_names,
						     SSA_NAME_VERSION (use));
	      if (usebitmap != NULL)
		{
		  changed |= bitmap_ior_into (vuse_names[ver],
					      usebitmap);
		}
	      else
		{
		  /* The argument has no set of its own; record just
		     its version, noting whether that was new.  */
		  changed |= !bitmap_bit_p (vuse_names[ver],
					    SSA_NAME_VERSION (use));
		  if (changed)
		    bitmap_set_bit (vuse_names[ver],
				    SSA_NAME_VERSION (use));
		}
	    }
	}
    }

  /* Dump each phi result's representative set when requested.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
      {
	bitmap reps = get_representative (vuse_names,
					  SSA_NAME_VERSION (PHI_RESULT (phi)));
	if (reps)
	  {
	    print_generic_expr (dump_file, PHI_RESULT (phi), 0);
	    fprintf (dump_file, " represents ");
	    dump_bitmap_of_names (dump_file, reps);
	  }
      }
  VEC_free (tree, heap, phis);
}
1918
/* Compute reaching vuses and antic safe loads.  RVUSE computation
   is a small bit of iterative dataflow to determine what virtual uses
1921   reach what blocks.  Because we can't generate overlapping virtual
1922   uses, and virtual uses *do* actually die, this ends up being faster
1923   in most cases than continually walking the virtual use/def chains
1924   to determine whether we are inside a block where a given virtual is
1925   still available to be used.
1926
1927   ANTIC_SAFE_LOADS are those loads that actually occur before any kill to
   their vuses in the block, and thus are safe at the top of the
1929   block.
1930
1931   An example:
1932
1933   <block begin>
1934   b = *a
1935   *a = 9
1936   <block end>
1937
1938   b = *a is an antic safe load because it still safe to consider it
1939   ANTIC at the top of the block.
1940
1941   We currently compute a conservative approximation to
1942   ANTIC_SAFE_LOADS.  We compute those loads that occur before *any*
1943   stores in the block.  This is not because it is difficult to
1944   compute the precise answer, but because it is expensive.  More
1945   testing is necessary to determine whether it is worth computing the
1946   precise answer.  */
1947
1948static void
1949compute_rvuse_and_antic_safe (void)
1950{
1951
1952  size_t i;
1953  tree phi;
1954  basic_block bb;
1955  int *postorder;
1956  bool changed = true;
1957  unsigned int *first_store_uid;
1958
1959  first_store_uid = xcalloc (n_basic_blocks, sizeof (unsigned int));
1960
1961  compute_vuse_representatives ();
1962
1963  FOR_ALL_BB (bb)
1964    {
1965      RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1966      RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1967      RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1968      RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
1969      ANTIC_SAFE_LOADS (bb) = NULL;
1970    }
1971
1972  /* Mark live on entry */
1973  for (i = 0; i < num_ssa_names; i++)
1974    {
1975      tree name = ssa_name (i);
1976      if (name && !is_gimple_reg (name)
1977	  && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
1978	bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
1979			SSA_NAME_VERSION (name));
1980    }
1981
1982  /* Compute local sets for reaching vuses.
1983     GEN(block) = generated in block and not locally killed.
1984     KILL(block) = set of vuses killed in block.
1985  */
1986
1987  FOR_EACH_BB (bb)
1988    {
1989      block_stmt_iterator bsi;
1990      ssa_op_iter iter;
1991      def_operand_p defp;
1992      use_operand_p usep;
1993
1994      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1995	{
1996	  tree stmt = bsi_stmt (bsi);
1997
1998	  if (first_store_uid[bb->index] == 0
1999	      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VMAYDEF
2000				     | SSA_OP_VMUSTDEF | SSA_OP_VMUSTKILL))
2001	    {
2002	      first_store_uid[bb->index] = stmt_ann (stmt)->uid;
2003	    }
2004
2005
2006	  FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
2007				    | SSA_OP_VMAYUSE)
2008	    {
2009	      tree use = USE_FROM_PTR (usep);
2010	      bitmap repbit = get_representative (vuse_names,
2011						  SSA_NAME_VERSION (use));
2012	      if (repbit != NULL)
2013		{
2014		  bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
2015		  bitmap_ior_into (RVUSE_KILL (bb), repbit);
2016		}
2017	      else
2018		{
2019		  bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
2020		  bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
2021		}
2022	    }
2023	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
2024	    {
2025	      tree def = DEF_FROM_PTR (defp);
2026	      bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
2027	    }
2028	}
2029    }
2030
2031  FOR_EACH_BB (bb)
2032    {
2033      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2034	{
2035	  if (!is_gimple_reg (PHI_RESULT (phi)))
2036	    {
2037	      edge e;
2038	      edge_iterator ei;
2039
2040	      tree def = PHI_RESULT (phi);
2041	      /* In reality, the PHI result is generated at the end of
2042		 each predecessor block.  This will make the value
2043		 LVUSE_IN for the bb containing the PHI, which is
2044		 correct.  */
2045	      FOR_EACH_EDGE (e, ei, bb->preds)
2046		bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
2047	    }
2048	}
2049    }
2050
2051  /* Solve reaching vuses.
2052
2053     RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
2054     RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
2055  */
2056  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
2057  pre_and_rev_post_order_compute (NULL, postorder, false);
2058
2059  changed = true;
2060  while (changed)
2061    {
2062      int j;
2063      changed = false;
2064      for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2065	{
2066	  edge e;
2067	  edge_iterator ei;
2068	  bb = BASIC_BLOCK (postorder[j]);
2069
2070	  FOR_EACH_EDGE (e, ei, bb->preds)
2071	    bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
2072
2073	  changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
2074					   RVUSE_GEN (bb),
2075					   RVUSE_IN (bb),
2076					   RVUSE_KILL (bb));
2077	}
2078    }
2079  free (postorder);
2080
2081  if (dump_file && (dump_flags & TDF_DETAILS))
2082    {
2083      FOR_ALL_BB (bb)
2084	{
2085	  fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
2086	  dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
2087
2088	  fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
2089	  dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
2090
2091	  fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
2092	  dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
2093
2094	  fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
2095	  dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
2096	}
2097    }
2098
2099  FOR_EACH_BB (bb)
2100    {
2101      value_set_node_t node;
2102      if (bitmap_empty_p (RVUSE_KILL (bb)))
2103	continue;
2104
2105      for (node = EXP_GEN (bb)->head; node; node = node->next)
2106	{
2107	  if (REFERENCE_CLASS_P (node->expr))
2108	    {
2109	      tree vh = get_value_handle (node->expr);
2110	      tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
2111
2112	      if (maybe)
2113		{
2114		  tree def = SSA_NAME_DEF_STMT (maybe);
2115
2116		  if (bb_for_stmt (def) != bb)
2117		    continue;
2118
2119		  if (TREE_CODE (def) == PHI_NODE
2120		      || stmt_ann (def)->uid < first_store_uid[bb->index])
2121		    {
2122		      if (ANTIC_SAFE_LOADS (bb) == NULL)
2123			ANTIC_SAFE_LOADS (bb) = set_new (true);
2124		      value_insert_into_set (ANTIC_SAFE_LOADS (bb),
2125					     node->expr);
2126		    }
2127		}
2128	    }
2129	}
2130    }
2131  free (first_store_uid);
2132}
2133
2134/* Return true if we can value number the call in STMT.  This is true
2135   if we have a pure or constant call.  */
2136
2137static bool
2138can_value_number_call (tree stmt)
2139{
2140  tree call = get_call_expr_in (stmt);
2141
2142  if (call_expr_flags (call)  & (ECF_PURE | ECF_CONST))
2143    return true;
2144  return false;
2145}
2146
2147/* Return true if OP is a tree which we can perform value numbering
2148   on.  */
2149
2150static bool
2151can_value_number_operation (tree op)
2152{
2153  return UNARY_CLASS_P (op)
2154    || BINARY_CLASS_P (op)
2155    || COMPARISON_CLASS_P (op)
2156    || REFERENCE_CLASS_P (op)
2157    || (TREE_CODE (op) == CALL_EXPR
2158	&& can_value_number_call (op));
2159}
2160
2161
2162/* Return true if OP is a tree which we can perform PRE on
2163   on.  This may not match the operations we can value number, but in
2164   a perfect world would.  */
2165
2166static bool
2167can_PRE_operation (tree op)
2168{
2169  return UNARY_CLASS_P (op)
2170    || BINARY_CLASS_P (op)
2171    || COMPARISON_CLASS_P (op)
2172    || TREE_CODE (op) == INDIRECT_REF
2173    || TREE_CODE (op) == COMPONENT_REF
2174    || TREE_CODE (op) == CALL_EXPR
2175    || TREE_CODE (op) == ARRAY_REF;
2176}
2177
2178
2179/* Inserted expressions are placed onto this worklist, which is used
2180   for performing quick dead code elimination of insertions we made
2181   that didn't turn out to be necessary.   */
2182static VEC(tree,heap) *inserted_exprs;
2183
/* Pool-allocated fake store expressions are placed onto this
   worklist, which, after dead code elimination has been performed,
   is walked to see which expressions need to be put into GC'able
   memory.  */
2187static VEC(tree, heap) *need_creation;
2188
2189/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2190   COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
2191   trying to rename aggregates into ssa form directly, which is a no
2192   no.
2193
2194   Thus, this routine doesn't create temporaries, it just builds a
2195   single access expression for the array, calling
2196   find_or_generate_expression to build the innermost pieces.
2197
2198   This function is a subroutine of create_expression_by_pieces, and
2199   should not be called on it's own unless you really know what you
2200   are doing.
2201*/
2202static tree
2203create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
2204{
2205  tree genop = expr;
2206  tree folded;
2207
2208  if (TREE_CODE (genop) == VALUE_HANDLE)
2209    {
2210      tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
2211      if (found)
2212	return found;
2213    }
2214
2215  if (TREE_CODE (genop) == VALUE_HANDLE)
2216    genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2217
2218  switch TREE_CODE (genop)
2219    {
2220    case ARRAY_REF:
2221      {
2222	tree op0;
2223	tree op1, op2, op3;
2224	op0 = create_component_ref_by_pieces (block,
2225					      TREE_OPERAND (genop, 0),
2226					      stmts);
2227	op1 = TREE_OPERAND (genop, 1);
2228	if (TREE_CODE (op1) == VALUE_HANDLE)
2229	  op1 = find_or_generate_expression (block, op1, stmts);
2230	op2 = TREE_OPERAND (genop, 2);
2231	if (op2 && TREE_CODE (op2) == VALUE_HANDLE)
2232	  op2 = find_or_generate_expression (block, op2, stmts);
2233	op3 = TREE_OPERAND (genop, 3);
2234	if (op3 && TREE_CODE (op3) == VALUE_HANDLE)
2235	  op3 = find_or_generate_expression (block, op3, stmts);
2236	folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1,
2237			      op2, op3);
2238	return folded;
2239      }
2240    case COMPONENT_REF:
2241      {
2242	tree op0;
2243	tree op1;
2244	op0 = create_component_ref_by_pieces (block,
2245					      TREE_OPERAND (genop, 0),
2246					      stmts);
2247	op1 = VALUE_HANDLE_EXPR_SET (TREE_OPERAND (genop, 1))->head->expr;
2248	folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
2249			      NULL_TREE);
2250	return folded;
2251      }
2252      break;
2253    case INDIRECT_REF:
2254      {
2255	tree op1 = TREE_OPERAND (genop, 0);
2256	tree genop1 = find_or_generate_expression (block, op1, stmts);
2257
2258	folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
2259			      genop1);
2260	return folded;
2261      }
2262      break;
2263    case VAR_DECL:
2264    case PARM_DECL:
2265    case RESULT_DECL:
2266    case SSA_NAME:
2267    case STRING_CST:
2268      return genop;
2269    default:
2270      gcc_unreachable ();
2271    }
2272
2273  return NULL_TREE;
2274}
2275
2276/* Find a leader for an expression, or generate one using
2277   create_expression_by_pieces if it's ANTIC but
2278   complex.
2279   BLOCK is the basic_block we are looking for leaders in.
2280   EXPR is the expression to find a leader or generate for.
2281   STMTS is the statement list to put the inserted expressions on.
2282   Returns the SSA_NAME of the LHS of the generated expression or the
2283   leader.  */
2284
2285static tree
2286find_or_generate_expression (basic_block block, tree expr, tree stmts)
2287{
2288  tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
2289
2290  /* If it's still NULL, it must be a complex expression, so generate
2291     it recursively.  */
2292  if (genop == NULL)
2293    {
2294      genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
2295
2296      gcc_assert (can_PRE_operation (genop));
2297      genop = create_expression_by_pieces (block, genop, stmts);
2298    }
2299  return genop;
2300}
2301
2302#define NECESSARY(stmt)		stmt->common.asm_written_flag
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */

static tree
create_expression_by_pieces (basic_block block, tree expr, tree stmts)
{
  tree temp, name;
  tree folded, forced_stmts, newexpr;
  tree v;
  tree_stmt_iterator tsi;

  /* First build a GENERIC form of EXPR, finding or generating a
     real-operand leader for each of its operands.  */
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_expression:
      {
	/* The only tcc_expression handled here is CALL_EXPR (see the
	   assert): rebuild the callee, each argument, and the static
	   chain operand.  */
	tree op0, op2;
	tree arglist;
	tree genop0, genop2;
	tree genarglist;
	tree walker, genwalker;

	gcc_assert (TREE_CODE (expr) == CALL_EXPR);
	genop2 = NULL;

	op0 = TREE_OPERAND (expr, 0);
	arglist = TREE_OPERAND (expr, 1);
	op2 = TREE_OPERAND (expr, 2);

	genop0 = find_or_generate_expression (block, op0, stmts);
	/* Walk the copied argument list in lockstep with the
	   original, replacing each value with a leader.  */
	genarglist = copy_list (arglist);
	for (walker = arglist, genwalker = genarglist;
	     genwalker && walker;
	     genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
	  {
	    TREE_VALUE (genwalker)
	      = find_or_generate_expression (block, TREE_VALUE (walker),
					     stmts);
	  }

	if (op2)
	  genop2 = find_or_generate_expression (block, op2, stmts);
	folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop0, genarglist, genop2);
	break;


      }
      break;  /* NOTE(review): unreachable; the break above already exits.  */
    case tcc_reference:
      {
	/* COMPONENT_REF and ARRAY_REF are built as one access
	   expression; other references just rebuild their operand.  */
	if (TREE_CODE (expr) == COMPONENT_REF
	    || TREE_CODE (expr) == ARRAY_REF)
	  {
	    folded = create_component_ref_by_pieces (block, expr, stmts);
	  }
	else
	  {
	    tree op1 = TREE_OPERAND (expr, 0);
	    tree genop1 = find_or_generate_expression (block, op1, stmts);

	    folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
				  genop1);
	  }
	break;
      }

    case tcc_binary:
    case tcc_comparison:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree op2 = TREE_OPERAND (expr, 1);
	tree genop1 = find_or_generate_expression (block, op1, stmts);
	tree genop2 = find_or_generate_expression (block, op2, stmts);
	folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop1, genop2);
	break;
      }

    case tcc_unary:
      {
	tree op1 = TREE_OPERAND (expr, 0);
	tree genop1 = find_or_generate_expression (block, op1, stmts);
	folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
			      genop1);
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Force the generated expression to be a sequence of GIMPLE
     statements.
     We have to call unshare_expr because force_gimple_operand may
     modify the tree we pass to it.  */
  newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
                                  false, NULL);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them on in the instruction stream.  */
  if (forced_stmts)
    {
      tsi = tsi_start (forced_stmts);
      for (; !tsi_end_p (tsi); tsi_next (&tsi))
	{
	  tree stmt = tsi_stmt (tsi);
	  tree forcedname = TREE_OPERAND (stmt, 0);
	  tree forcedexpr = TREE_OPERAND (stmt, 1);
	  tree val = vn_lookup_or_add (forcedexpr, NULL);

	  /* Record the insertion (for later DCE of unneeded ones) and
	     make the forced name available in this block's sets.  */
	  VEC_safe_push (tree, heap, inserted_exprs, stmt);
	  vn_add (forcedname, val);
	  bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
	  bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
	  mark_new_vars_to_rename (stmt);
	}
      tsi = tsi_last (stmts);
      tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
    }

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  The temporary PRETEMP is reused across calls
     as long as its type matches EXPR's.  */
  if (!pretemp || TREE_TYPE (expr) != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_var (TREE_TYPE (expr), "pretmp");
      get_var_ann (pretemp);
    }

  temp = pretemp;
  add_referenced_var (temp);

  if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;

  newexpr = build2 (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
  name = make_ssa_name (temp, newexpr);
  TREE_OPERAND (newexpr, 0) = name;
  NECESSARY (newexpr) = 0;

  tsi = tsi_last (stmts);
  tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
  VEC_safe_push (tree, heap, inserted_exprs, newexpr);
  mark_new_vars_to_rename (newexpr);

  /* Add a value handle to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  v = get_value_handle (expr);
  vn_add (name, v);
  bitmap_value_replace_in_set (NEW_SETS (block), name);
  bitmap_value_replace_in_set (AVAIL_OUT (block), name);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_generic_expr (dump_file, newexpr, 0);
      fprintf (dump_file, " in predecessor %d\n", block->index);
    }

  return name;
}
2478
/* Insert the to-be-made-available values of NODE for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value handle as
   NODE.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, value_set_node_t node,
			    tree *avail)
{
  tree val = get_value_handle (node->expr);
  edge pred;
  bool insertions = false;
  bool nophi = false;
  basic_block bprime;
  tree eprime;
  edge_iterator ei;
  tree type = TREE_TYPE (avail[EDGE_PRED (block, 0)->src->index]);
  tree temp;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Found partial redundancy for expression ");
      print_generic_expr (dump_file, node->expr, 0);
      fprintf (dump_file, " (");
      print_generic_expr (dump_file, val, 0);
      fprintf (dump_file, ")");
      fprintf (dump_file, "\n");
    }

  /* Make sure we aren't creating an induction variable.  */
  if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
      && TREE_CODE_CLASS (TREE_CODE (node->expr)) != tcc_reference )
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if (firstinsideloop ^ secondinsideloop)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
	  /* We still perform the insertions below, but suppress the
	     merging PHI.  */
	  nophi = true;
	}
    }


  /* Make the necessary insertions: generate the expression in each
     predecessor where AVAIL holds a complex (PRE-able) form.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      tree stmts = alloc_stmt_list ();
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[bprime->index];

      if (can_PRE_operation (eprime))
	{
#ifdef ENABLE_CHECKING
	  tree vh;

	  /* eprime may be an invariant.  */
	  vh = TREE_CODE (eprime) == VALUE_HANDLE
	    ? eprime
	    : get_value_handle (eprime);

	  /* ensure that the virtual uses we need reach our block.  */
	  if (TREE_CODE (vh) == VALUE_HANDLE)
	    {
	      int i;
	      tree vuse;
	      for (i = 0;
		   VEC_iterate (tree, VALUE_HANDLE_VUSES (vh), i, vuse);
		   i++)
		{
		  size_t id = SSA_NAME_VERSION (vuse);
		  gcc_assert (bitmap_bit_p (RVUSE_OUT (bprime), id)
			      || IS_EMPTY_STMT (SSA_NAME_DEF_STMT (vuse)));
		}
	    }
#endif
	  builtexpr = create_expression_by_pieces (bprime,
						   eprime,
						   stmts);
	  bsi_insert_on_edge (pred, stmts);
	  /* AVAIL now holds the SSA name of the generated value.  */
	  avail[bprime->index] = builtexpr;
	  insertions = true;
	}
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  PREPHITEMP is reused
     across calls while its type matches.  */
  if (!prephitemp || TREE_TYPE (prephitemp) != type)
    {
      prephitemp = create_tmp_var (type, "prephitmp");
      get_var_ann (prephitemp);
    }

  temp = prephitemp;
  add_referenced_var (temp);

  if (TREE_CODE (type) == COMPLEX_TYPE)
    DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
  temp = create_phi_node (temp, block);

  NECESSARY (temp) = 0;
  VEC_safe_push (tree, heap, inserted_exprs, temp);
  FOR_EACH_EDGE (pred, ei, block->preds)
    add_phi_arg (temp, avail[pred->src->index], pred);

  vn_add (PHI_RESULT (temp), val);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     inserted there.

     Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
  */

  bitmap_insert_into_set (PHI_GEN (block),
			  PHI_RESULT (temp));
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       PHI_RESULT (temp));
  bitmap_insert_into_set (NEW_SETS (block),
			  PHI_RESULT (temp));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created phi ");
      print_generic_expr (dump_file, temp, 0);
      fprintf (dump_file, " in block %d\n", block->index);
    }
  pre_stats.phis++;
  return true;
}
2629
2630
2631
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
           any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
           the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
           NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

*/

static bool
insert_aux (basic_block block)
{
  basic_block son;
  bool new_stuff = false;

  if (block)
    {
      basic_block dom;
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  unsigned i;
	  bitmap_iterator bi;
	  bitmap_set_t newset = NEW_SETS (dom);
	  if (newset)
	    {
	      /* Note that we need to value_replace both NEW_SETS, and
		 AVAIL_OUT. For both the case of NEW_SETS, the value may be
		 represented by some non-simple expression here that we want
		 to replace it with.  */
	      EXECUTE_IF_SET_IN_BITMAP (newset->expressions, 0, i, bi)
		{
		  bitmap_value_replace_in_set (NEW_SETS (block), ssa_name (i));
		  bitmap_value_replace_in_set (AVAIL_OUT (block), ssa_name (i));
		}
	    }
	  if (!single_pred_p (block))
	    {
	      value_set_node_t node;
	      /* Check each ANTIC expression of BLOCK for partial
		 redundancy.  */
	      for (node = ANTIC_IN (block)->head;
		   node;
		   node = node->next)
		{
		  if (can_PRE_operation (node->expr)
		      && !AGGREGATE_TYPE_P (TREE_TYPE (node->expr)))
		    {
		      tree *avail;
		      tree val;
		      bool by_some = false;
		      bool cant_insert = false;
		      bool all_same = true;
		      tree first_s = NULL;
		      edge pred;
		      basic_block bprime;
		      tree eprime = NULL_TREE;
		      edge_iterator ei;

		      val = get_value_handle (node->expr);
		      /* Skip values we already inserted a PHI for.  */
		      if (bitmap_set_contains_value (PHI_GEN (block), val))
			continue;
		      /* Skip fully redundant values (available from
			 the dominator).  */
		      if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
			{
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    fprintf (dump_file, "Found fully redundant value\n");
			  continue;
			}

		      /* AVAIL maps predecessor block index to the
			 leader (or translated expression) there.  */
		      avail = XCNEWVEC (tree, last_basic_block);
		      FOR_EACH_EDGE (pred, ei, block->preds)
			{
			  tree vprime;
			  tree edoubleprime;

			  /* This can happen in the very weird case
			     that our fake infinite loop edges have caused a
			     critical edge to appear.  */
			  if (EDGE_CRITICAL_P (pred))
			    {
			      cant_insert = true;
			      break;
			    }
			  bprime = pred->src;
			  eprime = phi_translate (node->expr,
						  ANTIC_IN (block),
						  bprime, block);

			  /* eprime will generally only be NULL if the
			     value of the expression, translated
			     through the PHI for this predecessor, is
			     undefined.  If that is the case, we can't
			     make the expression fully redundant,
			     because its value is undefined along a
			     predecessor path.  We can thus break out
			     early because it doesn't matter what the
			     rest of the results are.  */
			  if (eprime == NULL)
			    {
			      cant_insert = true;
			      break;
			    }

			  eprime = fully_constant_expression (eprime);
			  vprime = get_value_handle (eprime);
			  gcc_assert (vprime);
			  edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
							     vprime);
			  if (edoubleprime == NULL)
			    {
			      /* Not available here; remember the
				 translated expression for insertion.  */
			      avail[bprime->index] = eprime;
			      all_same = false;
			    }
			  else
			    {
			      avail[bprime->index] = edoubleprime;
			      by_some = true;
			      if (first_s == NULL)
				first_s = edoubleprime;
			      else if (!operand_equal_p (first_s, edoubleprime,
							 0))
				all_same = false;
			    }
			}
		      /* If we can insert it, it's not the same value
			 already existing along every predecessor, and
			 it's defined by some predecessor, it is
			 partially redundant.  */
		      if (!cant_insert && !all_same && by_some)
			{
			  if (insert_into_preds_of_block (block, node, avail))
 			    new_stuff = true;
			}
		      /* If all edges produce the same value and that value is
			 an invariant, then the PHI has the same value on all
			 edges.  Note this.  */
		      else if (!cant_insert && all_same && eprime
			       && is_gimple_min_invariant (eprime)
			       && !is_gimple_min_invariant (val))
			{
			  value_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
			  /* NOTE(review): this inner NODE shadows the
			     loop's NODE above; rename on next cleanup.  */
			  value_set_node_t node;

			  for (node = exprset->head; node; node = node->next)
 			    {
			      if (TREE_CODE (node->expr) == SSA_NAME)
				{
				  vn_add (node->expr, eprime);
				  pre_stats.constified++;
				}
 			    }
			}
		      free (avail);
		    }
		}
	    }
	}
    }
  for (son = first_dom_son (CDI_DOMINATORS, block);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      new_stuff |= insert_aux (son);
    }

  return new_stuff;
}
2803
2804/* Perform insertion of partially redundant values.  */
2805
2806static void
2807insert (void)
2808{
2809  bool new_stuff = true;
2810  basic_block bb;
2811  int num_iterations = 0;
2812
2813  FOR_ALL_BB (bb)
2814    NEW_SETS (bb) = bitmap_set_new ();
2815
2816  while (new_stuff)
2817    {
2818      num_iterations++;
2819      new_stuff = false;
2820      new_stuff = insert_aux (ENTRY_BLOCK_PTR);
2821    }
2822  if (num_iterations > 2 && dump_file && (dump_flags & TDF_STATS))
2823    fprintf (dump_file, "insert required %d iterations\n", num_iterations);
2824}
2825
2826
2827/* Return true if VAR is an SSA variable with no defining statement in
2828   this procedure, *AND* isn't a live-on-entry parameter.  */
2829
2830static bool
2831is_undefined_value (tree expr)
2832{
2833  return (TREE_CODE (expr) == SSA_NAME
2834          && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (expr))
2835	  /* PARM_DECLs and hard registers are always defined.  */
2836	  && TREE_CODE (SSA_NAME_VAR (expr)) != PARM_DECL);
2837}
2838
2839
2840/* Given an SSA variable VAR and an expression EXPR, compute the value
2841   number for EXPR and create a value handle (VAL) for it.  If VAR and
2842   EXPR are not the same, associate VAL with VAR.  Finally, add VAR to
2843   S1 and its value handle to S2.
2844
2845   VUSES represent the virtual use operands associated with EXPR (if
2846   any).  */
2847
2848static inline void
2849add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
2850	     bitmap_set_t s2)
2851{
2852  tree val = vn_lookup_or_add (expr, stmt);
2853
2854  /* VAR and EXPR may be the same when processing statements for which
2855     we are not computing value numbers (e.g., non-assignments, or
2856     statements that make aliased stores).  In those cases, we are
2857     only interested in making VAR available as its own value.  */
2858  if (var != expr)
2859    vn_add (var, val);
2860
2861  if (s1)
2862    bitmap_insert_into_set (s1, var);
2863  bitmap_value_insert_into_set (s2, var);
2864}
2865
2866
/* Given a unary or binary expression EXPR, create and return a new
   expression with the same structure as EXPR but with its operands
   replaced with the value handles of each of the operands of EXPR.

   STMT carries the virtual use operands associated with EXPR (if
   any).  Insert EXPR's operands into the EXP_GEN set for BLOCK.  */

static inline tree
create_value_expr_from (tree expr, basic_block block, tree stmt)
{
  int i;
  enum tree_code code = TREE_CODE (expr);
  tree vexpr;
  alloc_pool pool;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_reference
	      || TREE_CODE_CLASS (code) == tcc_expression
	      || TREE_CODE_CLASS (code) == tcc_exceptional
	      || TREE_CODE_CLASS (code) == tcc_declaration);

  /* Pick the allocation pool matching the node's class/size.  */
  if (TREE_CODE_CLASS (code) == tcc_unary)
    pool = unary_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_reference)
    pool = reference_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_binary)
    pool = binary_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_comparison)
    pool = comparison_node_pool;
  else if (TREE_CODE_CLASS (code) == tcc_exceptional)
    {
      gcc_assert (code == TREE_LIST);
      pool = list_node_pool;
    }
  else
    {
      gcc_assert (code == CALL_EXPR);
      pool = expression_node_pool;
    }

  /* Shallow-copy EXPR into pool memory; operands are rewritten to
     value handles below.  */
  vexpr = (tree) pool_alloc (pool);
  memcpy (vexpr, expr, tree_size (expr));

  /* This case is only for TREE_LIST's that appear as part of
     CALL_EXPR's.  Anything else is a bug, but we can't easily verify
     this, hence this comment.  TREE_LIST is not handled by the
     general case below is because they don't have a fixed length, or
     operands, so you can't access purpose/value/chain through
     TREE_OPERAND macros.  */

  if (code == TREE_LIST)
    {
      tree op = NULL_TREE;
      tree temp = NULL_TREE;
      /* Value-numberize the rest of the list first.  */
      if (TREE_CHAIN (vexpr))
	temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
      TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);


      /* Recursively value-numberize reference ops.  */
      if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
	{
	  tree tempop;
	  op = TREE_VALUE (vexpr);
	  tempop = create_value_expr_from (op, block, stmt);
	  op = tempop ? tempop : op;

	  TREE_VALUE (vexpr)  = vn_lookup_or_add (op, stmt);
	}
      else
	{
	  op = TREE_VALUE (vexpr);
	  TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
	}
      /* This is the equivalent of inserting op into EXP_GEN like we
	 do below */
      if (!is_undefined_value (op))
	value_insert_into_set (EXP_GEN (block), op);

      return vexpr;
    }

  /* General case: rewrite each operand of VEXPR to its value
     handle.  */
  for (i = 0; i < TREE_CODE_LENGTH (code); i++)
    {
      tree val, op;

      op = TREE_OPERAND (expr, i);
      if (op == NULL_TREE)
	continue;

      /* Recursively value-numberize reference ops and tree lists.  */
      if (REFERENCE_CLASS_P (op))
	{
	  tree tempop = create_value_expr_from (op, block, stmt);
	  op = tempop ? tempop : op;
	  val = vn_lookup_or_add (op, stmt);
	}
      else if (TREE_CODE (op) == TREE_LIST)
	{
	  tree tempop;

	  gcc_assert (TREE_CODE (expr) == CALL_EXPR);
	  tempop = create_value_expr_from (op, block, stmt);

	  op = tempop ? tempop : op;
	  vn_lookup_or_add (op, NULL);
	  /* Unlike everywhere else, we do *not* want to replace the
	     TREE_LIST itself with a value number, because support
	     functions we call will blow up.  */
	  val = op;
	}
      else
	/* Create a value handle for OP and add it to VEXPR.  */
	val = vn_lookup_or_add (op, NULL);

      if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
	value_insert_into_set (EXP_GEN (block), op);

      if (TREE_CODE (val) == VALUE_HANDLE)
	TREE_TYPE (val) = TREE_TYPE (TREE_OPERAND (vexpr, i));

      TREE_OPERAND (vexpr, i) = val;
    }

  return vexpr;
}
2995
2996
2997
2998/* Insert extra phis to merge values that are fully available from
2999   preds of BLOCK, but have no dominating representative coming from
3000   block DOM.   */
3001
static void
insert_extra_phis (basic_block block, basic_block dom)
{

  /* Only merge points (blocks with multiple predecessors) can expose
     values that are available on every incoming edge yet have no single
     dominating definition.  */
  if (!single_pred_p (block))
    {
      edge e;
      edge_iterator ei;
      bool first = true;
      bitmap_set_t tempset = bitmap_set_new ();

      /* Intersect the AVAIL_OUT sets of all predecessors; the result is
	 the set of values fully available on entry to BLOCK.  */
      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  /* We cannot handle abnormal incoming edges correctly.  */
	  if (e->flags & EDGE_ABNORMAL)
	    return;

	  if (first)
	    {
	      bitmap_set_copy (tempset, AVAIL_OUT (e->src));
	      first = false;
	    }
	  else
	    bitmap_set_and (tempset, AVAIL_OUT (e->src));
	}

      /* Values already available out of the dominator have a dominating
	 representative and need no merging PHI; drop them.  */
      if (dom)
	bitmap_set_and_compl (tempset, AVAIL_OUT (dom));

      if (!bitmap_set_empty_p (tempset))
	{
	  unsigned int i;
	  bitmap_iterator bi;

	  /* For every remaining value, insert a PHI in BLOCK merging the
	     per-predecessor leaders of that value.  */
	  EXECUTE_IF_SET_IN_BITMAP (tempset->expressions, 0, i, bi)
	    {
	      tree name = ssa_name (i);
	      tree val = get_value_handle (name);
	      tree temp;

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		continue;

	      /* Reuse the shared merge temporary when its type matches,
		 otherwise make a fresh one of the right type.  */
	      if (!mergephitemp
		  || TREE_TYPE (name) != TREE_TYPE (mergephitemp))
		{
		  mergephitemp = create_tmp_var (TREE_TYPE (name),
						 "mergephitmp");
		  get_var_ann (mergephitemp);
		}
	      temp = mergephitemp;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Creating phi ");
		  print_generic_expr (dump_file, temp, 0);
		  fprintf (dump_file, " to merge available but not dominating values ");
		}

	      add_referenced_var (temp);
	      temp = create_phi_node (temp, block);
	      NECESSARY (temp) = 0;
	      VEC_safe_push (tree, heap, inserted_exprs, temp);

	      /* One argument per incoming edge: the leader for VAL
		 available at the end of that predecessor.  */
	      FOR_EACH_EDGE (e, ei, block->preds)
		{
		  tree leader = bitmap_find_leader (AVAIL_OUT (e->src), val);

		  gcc_assert (leader);
		  add_phi_arg (temp, leader, e);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      print_generic_expr (dump_file, leader, 0);
		      fprintf (dump_file, " in block %d,", e->src->index);
		    }
		}

	      /* The PHI result represents the same value.  */
	      vn_add (PHI_RESULT (temp), val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "\n");
	    }
	}
    }
}
3088
3089/* Given a statement STMT and its right hand side which is a load, try
3090   to look for the expression stored in the location for the load, and
3091   return true if a useful equivalence was recorded for LHS.  */
3092
static bool
try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
{
  tree store_stmt = NULL;
  tree rhs;
  ssa_op_iter i;
  tree vuse;

  /* Walk all VUSEs of the load; each must be defined by the same
     simple store to MEM_REF for the lookup to succeed.  */
  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
    {
      tree def_stmt;

      gcc_assert (TREE_CODE (vuse) == SSA_NAME);
      def_stmt = SSA_NAME_DEF_STMT (vuse);

      /* If there is no useful statement for this VUSE, we'll not find a
	 useful expression to return either.  Likewise, if there is a
	 statement but it is not a simple assignment or it has virtual
	 uses, we can stop right here.  Note that this means we do
	 not look through PHI nodes, which is intentional.  */
      if (!def_stmt
	  || TREE_CODE (def_stmt) != MODIFY_EXPR
	  || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
	return false;

      /* If this is not the same statement as one we have looked at for
	 another VUSE of STMT already, we have two statements producing
	 something that reaches our STMT.  */
      if (store_stmt && store_stmt != def_stmt)
	return false;
      else
	{
	  /* Is this a store to the exact same location as the one we are
	     loading from in STMT?  */
	  if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
	    return false;

	  /* Otherwise remember this statement and see if all other VUSEs
	     come from the same statement.  */
	  store_stmt = def_stmt;
	}
    }

  /* Alright then, we have visited all VUSEs of STMT and we've determined
     that all of them come from the same statement STORE_STMT.  See if there
     is a useful expression we can deduce from STORE_STMT.  */
  rhs = TREE_OPERAND (store_stmt, 1);
  /* Only simple, abnormal-PHI-free or invariant right-hand sides are
     safe to record as an equivalence for LHS.  */
  if ((TREE_CODE (rhs) == SSA_NAME
       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
      || is_gimple_min_invariant (rhs)
      || TREE_CODE (rhs) == ADDR_EXPR
      || TREE_INVARIANT (rhs))
    {

      /* Yay!  Compute a value number for the RHS of the statement and
 	 add its value to the AVAIL_OUT set for the block.  Add the LHS
	 to TMP_GEN.  */
      add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
      if (TREE_CODE (rhs) == SSA_NAME
	  && !is_undefined_value (rhs))
	value_insert_into_set (EXP_GEN (block), rhs);
      return true;
    }

  return false;
}
3159
3160/* Return a copy of NODE that is stored in the temporary alloc_pool's.
3161   This is made recursively true, so that the operands are stored in
3162   the pool as well.  */
3163
3164static tree
3165poolify_tree (tree node)
3166{
3167  switch  (TREE_CODE (node))
3168    {
3169    case INDIRECT_REF:
3170      {
3171	tree temp = pool_alloc (reference_node_pool);
3172	memcpy (temp, node, tree_size (node));
3173	TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3174	return temp;
3175      }
3176      break;
3177    case MODIFY_EXPR:
3178      {
3179	tree temp = pool_alloc (modify_expr_node_pool);
3180	memcpy (temp, node, tree_size (node));
3181	TREE_OPERAND (temp, 0) = poolify_tree (TREE_OPERAND (temp, 0));
3182	TREE_OPERAND (temp, 1) = poolify_tree (TREE_OPERAND (temp, 1));
3183	return temp;
3184      }
3185      break;
3186    case SSA_NAME:
3187    case INTEGER_CST:
3188    case STRING_CST:
3189    case REAL_CST:
3190    case PARM_DECL:
3191    case VAR_DECL:
3192    case RESULT_DECL:
3193      return node;
3194    default:
3195      gcc_unreachable ();
3196    }
3197}
3198
3199static tree modify_expr_template;
3200
3201/* Allocate a MODIFY_EXPR with TYPE, and operands OP1, OP2 in the
3202   alloc pools and return it.  */
3203static tree
3204poolify_modify_expr (tree type, tree op1, tree op2)
3205{
3206  if (modify_expr_template == NULL)
3207    modify_expr_template = build2 (MODIFY_EXPR, type, op1, op2);
3208
3209  TREE_OPERAND (modify_expr_template, 0) = op1;
3210  TREE_OPERAND (modify_expr_template, 1) = op2;
3211  TREE_TYPE (modify_expr_template) = type;
3212
3213  return poolify_tree (modify_expr_template);
3214}
3215
3216
3217/* For each real store operation of the form
3218   *a = <value> that we see, create a corresponding fake store of the
3219   form storetmp_<version> = *a.
3220
3221   This enables AVAIL computation to mark the results of stores as
3222   available.  Without this, you'd need to do some computation to
3223   mark the result of stores as ANTIC and AVAIL at all the right
3224   points.
3225   To save memory, we keep the store
3226   statements pool allocated until we decide whether they are
3227   necessary or not.  */
3228
static void
insert_fake_stores (void)
{
  basic_block block;

  FOR_ALL_BB (block)
    {
      block_stmt_iterator bsi;
      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);

	  /* We can't generate SSA names for stores that are complex
	     or aggregate.  We also want to ignore things whose
	     virtual uses occur in abnormal phis.  */

	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == INDIRECT_REF
	      && !AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 0)))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (stmt, 0))) != COMPLEX_TYPE)
	    {
	      ssa_op_iter iter;
	      def_operand_p defp;
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree rhs = TREE_OPERAND (stmt, 1);
	      tree new;
	      bool notokay = false;

	      /* Reject the store if any of its virtual definitions
		 occurs in an abnormal PHI.  */
	      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
		{
		  tree defvar = DEF_FROM_PTR (defp);
		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
		    {
		      notokay = true;
		      break;
		    }
		}

	      if (notokay)
		continue;

	      /* Reuse the shared store temporary if the type matches,
		 otherwise create a new one.  */
	      if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
		{
		  storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
		  get_var_ann (storetemp);
		}

	      /* Build storetmp_<version> = *a in the pool allocator and
		 insert it right after the real store.  */
	      new = poolify_modify_expr (TREE_TYPE (stmt), storetemp, lhs);

	      lhs = make_ssa_name (storetemp, new);
	      TREE_OPERAND (new, 0) = lhs;
	      create_ssa_artficial_load_stmt (new, stmt);

	      /* Remember the fake store so realify_fake_stores can later
		 turn it into a real statement or discard it.  */
	      NECESSARY (new) = 0;
	      VEC_safe_push (tree, heap, inserted_exprs, new);
	      VEC_safe_push (tree, heap, need_creation, new);
	      bsi_insert_after (&bsi, new, BSI_NEW_STMT);
	    }
	}
    }
}
3290
3291/* Turn the pool allocated fake stores that we created back into real
3292   GC allocated ones if they turned out to be necessary to PRE some
3293   expressions.  */
3294
static void
realify_fake_stores (void)
{
  unsigned int i;
  tree stmt;

  for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
    {
      if (NECESSARY (stmt))
	{
	  block_stmt_iterator bsi;
	  tree newstmt;

	  /* Mark the temp variable as referenced */
	  add_referenced_var (SSA_NAME_VAR (TREE_OPERAND (stmt, 0)));

	  /* Put the new statement in GC memory, fix up the
	     SSA_NAME_DEF_STMT on it, and then put it in place of
	     the old statement before the store in the IR stream
	     as a plain ssa name copy.  */
	  bsi = bsi_for_stmt (stmt);
	  bsi_prev (&bsi);
	  /* BSI now points at the real store preceding the fake one;
	     copy its RHS into a GC-allocated assignment.  */
	  newstmt = build2 (MODIFY_EXPR, void_type_node,
			    TREE_OPERAND (stmt, 0),
			    TREE_OPERAND (bsi_stmt (bsi), 1));
	  SSA_NAME_DEF_STMT (TREE_OPERAND (newstmt, 0)) = newstmt;
	  bsi_insert_before (&bsi, newstmt, BSI_SAME_STMT);
	  bsi = bsi_for_stmt (stmt);
	  bsi_remove (&bsi, true);
	}
      else
	/* The fake store turned out to be unnecessary; release the SSA
	   names it defined.  */
	release_defs (stmt);
    }
}
3329
3330/* Tree-combine a value number expression *EXPR_P that does a type
3331   conversion with the value number expression of its operand.
3332   Returns true, if *EXPR_P simplifies to a value number or
3333   gimple min-invariant expression different from EXPR_P and
3334   sets *EXPR_P to the simplified expression value number.
3335   Otherwise returns false and does not change *EXPR_P.  */
3336
3337static bool
3338try_combine_conversion (tree *expr_p)
3339{
3340  tree expr = *expr_p;
3341  tree t;
3342
3343  if (!((TREE_CODE (expr) == NOP_EXPR
3344	 || TREE_CODE (expr) == CONVERT_EXPR)
3345	&& TREE_CODE (TREE_OPERAND (expr, 0)) == VALUE_HANDLE
3346	&& !VALUE_HANDLE_VUSES (TREE_OPERAND (expr, 0))))
3347    return false;
3348
3349  t = fold_unary (TREE_CODE (expr), TREE_TYPE (expr),
3350		  VALUE_HANDLE_EXPR_SET (TREE_OPERAND (expr, 0))->head->expr);
3351  if (!t)
3352    return false;
3353
3354  /* Strip useless type conversions, which is safe in the optimizers but
3355     not generally in fold.  */
3356  STRIP_USELESS_TYPE_CONVERSION (t);
3357
3358  /* Disallow value expressions we have no value number for already, as
3359     we would miss a leader for it here.  */
3360  if (!(TREE_CODE (t) == VALUE_HANDLE
3361	|| is_gimple_min_invariant (t)))
3362    t = vn_lookup (t, NULL);
3363
3364  if (t && t != expr)
3365    {
3366      *expr_p = t;
3367      return true;
3368    }
3369  return false;
3370}
3371
3372/* Compute the AVAIL set for all basic blocks.
3373
3374   This function performs value numbering of the statements in each basic
3375   block.  The AVAIL sets are built from information we glean while doing
3376   this value numbering, since the AVAIL sets contain only one entry per
3377   value.
3378
3379   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3380   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
3381
static void
compute_avail (void)
{
  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  tree param;
  /* For arguments with default definitions, we pretend they are
     defined in the entry block.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (default_def (param) != NULL)
	{
	  tree def = default_def (param);
	  vn_lookup_or_add (def, NULL);
	  bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
	  bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
	}
    }

  /* Likewise for the static chain decl. */
  if (cfun->static_chain_decl)
    {
      param = cfun->static_chain_decl;
      if (default_def (param) != NULL)
        {
          tree def = default_def (param);
          vn_lookup_or_add (def, NULL);
          bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), def);
          bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), def);
        }
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  /* Loop until the worklist is empty.  Blocks are processed in a
     dominator-tree preorder, so AVAIL_OUT of the dominator is always
     computed before its children need it.  */
  while (sp)
    {
      block_stmt_iterator bsi;
      tree stmt, phi;
      basic_block dom;
      unsigned int stmt_uid = 1;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
	 its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));

      if (!in_fre)
	insert_extra_phis (block, dom);

      /* Generate values for PHI nodes.  */
      for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
	/* We have no need for virtual phis, as they don't represent
	   actual computations.  */
	if (is_gimple_reg (PHI_RESULT (phi)))
	  add_to_sets (PHI_RESULT (phi), PHI_RESULT (phi), NULL,
		       PHI_GEN (block), AVAIL_OUT (block));

      /* Now compute value numbers and populate value sets with all
	 the expressions computed in BLOCK.  */
      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  stmt_ann_t ann;
	  ssa_op_iter iter;
	  tree op;

	  stmt = bsi_stmt (bsi);
	  ann = stmt_ann (stmt);

	  /* Assign increasing uids so later passes can compare
	     statement order within a block cheaply.  */
	  ann->uid = stmt_uid++;

	  /* For regular value numbering, we are only interested in
	     assignments of the form X_i = EXPR, where EXPR represents
	     an "interesting" computation, it has no volatile operands
	     and X_i doesn't flow through an abnormal edge.  */
	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && !ann->has_volatile_ops
	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (stmt, 0)))
	    {
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree rhs = TREE_OPERAND (stmt, 1);

	      /* Try to look through loads.  */
	      if (TREE_CODE (lhs) == SSA_NAME
		  && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
		  && try_look_through_load (lhs, rhs, stmt, block))
		continue;

	      STRIP_USELESS_TYPE_CONVERSION (rhs);
	      if (can_value_number_operation (rhs))
		{
		  /* For value numberable operation, create a
		     duplicate expression with the operands replaced
		     with the value handles of the original RHS.  */
		  tree newt = create_value_expr_from (rhs, block, stmt);
		  if (newt)
		    {
		      /* If we can combine a conversion expression
			 with the expression for its operand just
			 record the value number for it.  */
		      if (try_combine_conversion (&newt))
			vn_add (lhs, newt);
		      else
			{
			  tree val = vn_lookup_or_add (newt, stmt);
			  vn_add (lhs, val);
			  value_insert_into_set (EXP_GEN (block), newt);
			}
		      bitmap_insert_into_set (TMP_GEN (block), lhs);
		      bitmap_value_insert_into_set (AVAIL_OUT (block), lhs);
		      continue;
		    }
		}
	      else if ((TREE_CODE (rhs) == SSA_NAME
			&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
		       || is_gimple_min_invariant (rhs)
		       || TREE_CODE (rhs) == ADDR_EXPR
		       || TREE_INVARIANT (rhs)
		       || DECL_P (rhs))
		{
		  /* Compute a value number for the RHS of the statement
		     and add its value to the AVAIL_OUT set for the block.
		     Add the LHS to TMP_GEN.  */
		  add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
			       AVAIL_OUT (block));

		  if (TREE_CODE (rhs) == SSA_NAME
		      && !is_undefined_value (rhs))
		    value_insert_into_set (EXP_GEN (block), rhs);
		  continue;
		}
	    }

	  /* For any other statement that we don't recognize, simply
	     make the names generated by the statement available in
	     AVAIL_OUT and TMP_GEN.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }

  free (worklist);
}
3551
3552
3553/* Eliminate fully redundant computations.  */
3554
static void
eliminate (void)
{
  basic_block b;

  FOR_EACH_BB (b)
    {
      block_stmt_iterator i;

      for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
        {
          tree stmt = bsi_stmt (i);

	  /* Lookup the RHS of the expression, see if we have an
	     available computation for it.  If so, replace the RHS with
	     the available computation.  */
	  if (TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
	      && TREE_CODE (TREE_OPERAND (stmt ,1)) != SSA_NAME
	      && !is_gimple_min_invariant (TREE_OPERAND (stmt, 1))
	      && !stmt_ann (stmt)->has_volatile_ops)
	    {
	      tree lhs = TREE_OPERAND (stmt, 0);
	      tree *rhs_p = &TREE_OPERAND (stmt, 1);
	      tree sprime;

	      /* SPRIME is the leader (an earlier, available computation)
		 for the value of LHS, if any exists in this block.  */
	      sprime = bitmap_find_leader (AVAIL_OUT (b),
					   vn_lookup (lhs, NULL));
	      if (sprime
		  && sprime != lhs
		  && (TREE_CODE (*rhs_p) != SSA_NAME
		      || may_propagate_copy (*rhs_p, sprime)))
		{
		  gcc_assert (sprime != *rhs_p);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_generic_expr (dump_file, *rhs_p, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime, 0);
		      fprintf (dump_file, " in ");
		      print_generic_stmt (dump_file, stmt, 0);
		    }

		  /* The definition feeding the leader must be kept by
		     the DCE pass over inserted code.  */
		  if (TREE_CODE (sprime) == SSA_NAME)
		    NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
		  /* We need to make sure the new and old types actually match,
		     which may require adding a simple cast, which fold_convert
		     will do for us.  */
		  if (TREE_CODE (*rhs_p) != SSA_NAME
		      && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
							      TREE_TYPE (sprime)))
		    sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);

		  pre_stats.eliminations++;
		  propagate_tree_value (rhs_p, sprime);
		  update_stmt (stmt);

		  /* If we removed EH side effects from the statement, clean
		     its EH information.  */
		  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		    {
		      bitmap_set_bit (need_eh_cleanup,
				      bb_for_stmt (stmt)->index);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "  Removed EH side effects.\n");
		    }
		}
	    }
        }
    }
}
3628
3629/* Borrow a bit of tree-ssa-dce.c for the moment.
3630   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
3631   this may be a bit faster, and we may want critical edges kept split.  */
3632
3633/* If OP's defining statement has not already been determined to be necessary,
3634   mark that statement necessary. Return the stmt, if it is newly
3635   necessary.  */
3636
3637static inline tree
3638mark_operand_necessary (tree op)
3639{
3640  tree stmt;
3641
3642  gcc_assert (op);
3643
3644  if (TREE_CODE (op) != SSA_NAME)
3645    return NULL;
3646
3647  stmt = SSA_NAME_DEF_STMT (op);
3648  gcc_assert (stmt);
3649
3650  if (NECESSARY (stmt)
3651      || IS_EMPTY_STMT (stmt))
3652    return NULL;
3653
3654  NECESSARY (stmt) = 1;
3655  return stmt;
3656}
3657
3658/* Because we don't follow exactly the standard PRE algorithm, and decide not
3659   to insert PHI nodes sometimes, and because value numbering of casts isn't
3660   perfect, we sometimes end up inserting dead code.   This simple DCE-like
3661   pass removes any insertions we made that weren't actually used.  */
3662
static void
remove_dead_inserted_code (void)
{
  VEC(tree,heap) *worklist = NULL;
  int i;
  tree t;

  /* Seed the worklist with every inserted expression that elimination
     marked as necessary; their dependencies get marked transitively.  */
  worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
    {
      if (NECESSARY (t))
	VEC_quick_push (tree, worklist, t);
    }
  while (VEC_length (tree, worklist) > 0)
    {
      t = VEC_pop (tree, worklist);

      /* PHI nodes are somewhat special in that each PHI alternative has
	 data and control dependencies.  All the statements feeding the
	 PHI node's arguments are always necessary. */
      if (TREE_CODE (t) == PHI_NODE)
	{
	  int k;

	  VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
	  for (k = 0; k < PHI_NUM_ARGS (t); k++)
            {
	      tree arg = PHI_ARG_DEF (t, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		{
		  arg = mark_operand_necessary (arg);
		  if (arg)
		    VEC_quick_push (tree, worklist, arg);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     V_MAY_DEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* The operands of V_MAY_DEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (V_MAY_DEF operands allow us to follow def-def
	     links).  */

	  FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
	    {
	      tree n = mark_operand_necessary (use);
	      if (n)
		VEC_safe_push (tree, heap, worklist, n);
	    }
	}
    }

  /* Anything inserted that was never marked necessary is dead; remove
     it from the IR.  */
  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
    {
      if (!NECESSARY (t))
	{
	  block_stmt_iterator bsi;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unnecessary insertion:");
	      print_generic_stmt (dump_file, t, 0);
	    }

	  if (TREE_CODE (t) == PHI_NODE)
	    {
	      remove_phi_node (t, NULL);
	    }
	  else
	    {
	      bsi = bsi_for_stmt (t);
	      bsi_remove (&bsi, true);
	      release_defs (t);
	    }
	}
    }
  VEC_free (tree, heap, worklist);
}
3747
3748/* Initialize data structures used by PRE.  */
3749
static void
init_pre (bool do_fre)
{
  basic_block bb;

  in_fre = do_fre;

  inserted_exprs = NULL;
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  mergephitemp = NULL_TREE;
  prephitemp = NULL_TREE;

  vn_init ();
  /* Loop information is only needed for full PRE's insertion
     heuristics, not for FRE.  */
  if (!do_fre)
    current_loops = loop_optimizer_init (LOOPS_NORMAL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  /* If block 0 has more than one predecessor, it means that its PHI
     nodes will have arguments coming from block -1.  This creates
     problems for several places in PRE that keep local arrays indexed
     by block number.  To prevent this, we split the edge coming from
     ENTRY_BLOCK_PTR (FIXME, if ENTRY_BLOCK_PTR had an index number
     different than -1 we wouldn't have to hack this.  tree-ssa-dce.c
     needs a similar change).  */
  if (!single_pred_p (single_succ (ENTRY_BLOCK_PTR)))
    if (!(single_succ_edge (ENTRY_BLOCK_PTR)->flags & EDGE_ABNORMAL))
      split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

  /* Per-block value sets live in bb->aux for the duration of the pass;
     fini_pre frees them.  */
  FOR_ALL_BB (bb)
    bb->aux = xcalloc (1, sizeof (struct bb_value_sets));

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (511, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  value_set_pool = create_alloc_pool ("Value sets",
				      sizeof (struct value_set), 30);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  value_set_node_pool = create_alloc_pool ("Value set nodes",
				           sizeof (struct value_set_node), 30);
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);
  /* One alloc pool per tree-node size class used by the temporary
     value expressions.  */
  binary_node_pool = create_alloc_pool ("Binary tree nodes",
				        tree_code_size (PLUS_EXPR), 30);
  unary_node_pool = create_alloc_pool ("Unary tree nodes",
				       tree_code_size (NEGATE_EXPR), 30);
  reference_node_pool = create_alloc_pool ("Reference tree nodes",
					   tree_code_size (ARRAY_REF), 30);
  expression_node_pool = create_alloc_pool ("Expression tree nodes",
					    tree_code_size (CALL_EXPR), 30);
  list_node_pool = create_alloc_pool ("List tree nodes",
				      tree_code_size (TREE_LIST), 30);
  comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
      					    tree_code_size (EQ_EXPR), 30);
  modify_expr_node_pool = create_alloc_pool ("MODIFY_EXPR nodes",
					     tree_code_size (MODIFY_EXPR),
					     30);
  modify_expr_template = NULL;

  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = set_new (true);
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
}
3823
3824
3825/* Deallocate data structures used by PRE.  */
3826
static void
fini_pre (bool do_fre)
{
  basic_block bb;
  unsigned int i;

  VEC_free (tree, heap, inserted_exprs);
  VEC_free (tree, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (value_set_pool);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (value_set_node_pool);
  free_alloc_pool (binary_node_pool);
  free_alloc_pool (reference_node_pool);
  free_alloc_pool (unary_node_pool);
  free_alloc_pool (list_node_pool);
  free_alloc_pool (expression_node_pool);
  free_alloc_pool (comparison_node_pool);
  free_alloc_pool (modify_expr_node_pool);
  htab_delete (phi_translate_table);
  /* Undo connect_infinite_loops_to_exit from init_pre.  */
  remove_fake_exit_edges ();

  /* Free the per-block value sets allocated in init_pre.  */
  FOR_ALL_BB (bb)
    {
      free (bb->aux);
      bb->aux = NULL;
    }

  free_dominance_info (CDI_POST_DOMINATORS);
  vn_delete ();

  /* Purge dead EH edges in the blocks elimination flagged.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      tree_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* Wipe out pointers to VALUE_HANDLEs.  In the not terribly distant
     future we will want them to be persistent though.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);

      if (!name)
	continue;

      if (SSA_NAME_VALUE (name)
	  && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
	SSA_NAME_VALUE (name) = NULL;
    }
  if (!do_fre && current_loops)
    {
      loop_optimizer_finalize (current_loops);
      current_loops = NULL;
    }
}
3885
3886/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
3887   only wants to do full redundancy elimination.  */
3888
static void
execute_pre (bool do_fre)
{
  init_pre (do_fre);

  /* Fake stores make the values of real stores available, which lets
     redundant loads after stores be eliminated; FRE does not insert,
     so it has no use for them.  */
  if (!do_fre)
    insert_fake_stores ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_value_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  bitmap_print_value_set (dump_file, TMP_GEN (bb), "tmp_gen",
				  bb->index);
	  bitmap_print_value_set (dump_file, AVAIL_OUT (bb), "avail_out",
				  bb->index);
	}
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      vuse_names = XCNEWVEC (bitmap, num_ssa_names);
      compute_rvuse_and_antic_safe ();
      compute_antic ();
      insert ();
      free (vuse_names);
    }

  /* Remove all the redundant expressions.  */
  eliminate ();


  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Insertions: %d\n", pre_stats.insertions);
      fprintf (dump_file, "New PHIs: %d\n", pre_stats.phis);
      fprintf (dump_file, "Eliminated: %d\n", pre_stats.eliminations);
      fprintf (dump_file, "Constified: %d\n", pre_stats.constified);
    }

  bsi_commit_edge_inserts ();

  /* PRE insertions may be dead and fake stores must become real
     statements or be discarded; FRE never inserted anything.  */
  if (!do_fre)
    {
      remove_dead_inserted_code ();
      realify_fake_stores ();
    }

  fini_pre (do_fre);

}
3951
3952/* Gate and execute functions for PRE.  */
3953
3954static unsigned int
3955do_pre (void)
3956{
3957  execute_pre (false);
3958  return 0;
3959}
3960
static bool
gate_pre (void)
{
  /* Enabled by -ftree-pre.  */
  return flag_tree_pre != 0;
}
3966
/* Pass descriptor for full partial redundancy elimination.  */
struct tree_opt_pass pass_pre =
{
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa | PROP_alias,		/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
  | TODO_verify_ssa, /* todo_flags_finish */
  0					/* letter */
};
3985
3986
3987/* Gate and execute functions for FRE.  */
3988
static unsigned int
execute_fre (void)
{
  /* Full redundancy elimination only: no insertion is performed.  */
  execute_pre (/*do_fre=*/true);
  return 0;
}
3995
static bool
gate_fre (void)
{
  /* Enabled by -ftree-fre.  */
  return flag_tree_fre != 0;
}
4001
/* Pass descriptor for full redundancy elimination.  Unlike PRE, FRE
   does not require split critical edges since it never inserts.  */
struct tree_opt_pass pass_fre =
{
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
  0					/* letter */
};
4018