1/* SSA Dominator optimizations for trees
2   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3   Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to
19the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20Boston, MA 02110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "flags.h"
28#include "rtl.h"
29#include "tm_p.h"
30#include "ggc.h"
31#include "basic-block.h"
32#include "cfgloop.h"
33#include "output.h"
34#include "expr.h"
35#include "function.h"
36#include "diagnostic.h"
37#include "timevar.h"
38#include "tree-dump.h"
39#include "tree-flow.h"
40#include "domwalk.h"
41#include "real.h"
42#include "tree-pass.h"
43#include "tree-ssa-propagate.h"
44#include "langhooks.h"
45#include "params.h"
46
47/* This file implements optimizations on the dominator tree.  */
48
49
50/* Structure for recording edge equivalences as well as any pending
51   edge redirections during the dominator optimizer.
52
53   Computing and storing the edge equivalences instead of creating
54   them on-demand can save significant amounts of time, particularly
55   for pathological cases involving switch statements.
56
57   These structures live for a single iteration of the dominator
58   optimizer in the edge's AUX field.  At the end of an iteration we
59   free each of these structures and update the AUX field to point
60   to any requested redirection target (the code for updating the
61   CFG and SSA graph for edge redirection expects redirection edge
62   targets to be in the AUX field for each edge.  */
63
64struct edge_info
65{
66  /* If this edge creates a simple equivalence, the LHS and RHS of
67     the equivalence will be stored here.  */
68  tree lhs;
69  tree rhs;
70
71  /* Traversing an edge may also indicate one or more particular conditions
72     are true or false.  The number of recorded conditions can vary, but
73     can be determined by the condition's code.  So we have an array
74     and its maximum index rather than use a varray.  */
75  tree *cond_equivalences;
76  unsigned int max_cond_equivalences;
77
78  /* If we can thread this edge this field records the new target.  */
79  edge redirection_target;
80};
81
82
83/* Hash table with expressions made available during the renaming process.
84   When an assignment of the form X_i = EXPR is found, the statement is
85   stored in this table.  If the same expression EXPR is later found on the
86   RHS of another statement, it is replaced with X_i (thus performing
87   global redundancy elimination).  Similarly as we pass through conditionals
88   we record the conditional itself as having either a true or false value
89   in this table.  */
90static htab_t avail_exprs;
91
92/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
93   expressions it enters into the hash table along with a marker entry
94   (null).  When we finish processing the block, we pop off entries and
95   remove the expressions from the global hash table until we hit the
96   marker.  */
97static VEC(tree,heap) *avail_exprs_stack;
98
99/* Stack of statements we need to rescan during finalization for newly
100   exposed variables.
101
102   Statement rescanning must occur after the current block's available
103   expressions are removed from AVAIL_EXPRS.  Else we may change the
104   hash code for an expression and be unable to find/remove it from
105   AVAIL_EXPRS.  */
106static VEC(tree,heap) *stmts_to_rescan;
107
108/* Structure for entries in the expression hash table.
109
110   This requires more memory for the hash table entries, but allows us
111   to avoid creating silly tree nodes and annotations for conditionals,
112   eliminates 2 global hash tables and two block local varrays.
113
114   It also allows us to reduce the number of hash table lookups we
115   have to perform in lookup_avail_expr and finally it allows us to
116   significantly reduce the number of calls into the hashing routine
117   itself.  */
118
119struct expr_hash_elt
120{
121  /* The value (lhs) of this expression.  */
122  tree lhs;
123
124  /* The expression (rhs) we want to record.  */
125  tree rhs;
126
127  /* The stmt pointer if this element corresponds to a statement.  */
128  tree stmt;
129
130  /* The hash value for RHS/ann.  */
131  hashval_t hash;
132};
133
134/* Stack of dest,src pairs that need to be restored during finalization.
135
136   A NULL entry is used to mark the end of pairs which need to be
137   restored during finalization of this block.  */
138static VEC(tree,heap) *const_and_copies_stack;
139
140/* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
141   know their exact value.  */
142static bitmap nonzero_vars;
143
144/* Bitmap of blocks that are scheduled to be threaded through.  This
145   is used to communicate with thread_through_blocks.  */
146static bitmap threaded_blocks;
147
148/* Stack of SSA_NAMEs which need their NONZERO_VARS property cleared
149   when the current block is finalized.
150
151   A NULL entry is used to mark the end of names needing their
152   entry in NONZERO_VARS cleared during finalization of this block.  */
153static VEC(tree,heap) *nonzero_vars_stack;
154
155/* Track whether or not we have changed the control flow graph.  */
156static bool cfg_altered;
157
158/* Bitmap of blocks that have had EH statements cleaned.  We should
159   remove their dead edges eventually.  */
160static bitmap need_eh_cleanup;
161
162/* Statistics for dominator optimizations.  */
163struct opt_stats_d
164{
165  long num_stmts;
166  long num_exprs_considered;
167  long num_re;
168  long num_const_prop;
169  long num_copy_prop;
170  long num_iterations;
171};
172
173static struct opt_stats_d opt_stats;
174
175/* Value range propagation record.  Each time we encounter a conditional
176   of the form SSA_NAME COND CONST we create a new vrp_element to record
177   how the condition affects the possible values SSA_NAME may have.
178
179   Each record contains the condition tested (COND), and the range of
180   values the variable may legitimately have if COND is true.  Note the
181   range of values may be a smaller range than COND specifies if we have
182   recorded other ranges for this variable.  Each record also contains the
183   block in which the range was recorded for invalidation purposes.
184
185   Note that the current known range is computed lazily.  This allows us
186   to avoid the overhead of computing ranges which are never queried.
187
188   When we encounter a conditional, we look for records which constrain
189   the SSA_NAME used in the condition.  In some cases those records allow
190   us to determine the condition's result at compile time.  In other cases
191   they may allow us to simplify the condition.
192
193   We also use value ranges to do things like transform signed div/mod
194   operations into unsigned div/mod or to simplify ABS_EXPRs.
195
196   Simple experiments have shown these optimizations to not be all that
197   useful on switch statements (much to my surprise).  So switch statement
198   optimizations are not performed.
199
200   Note carefully we do not propagate information through each statement
201   in the block.  i.e., if we know variable X has a value defined of
202   [0, 25] and we encounter Y = X + 1, we do not track a value range
203   for Y (which would be [1, 26] if we cared).  Similarly we do not
204   constrain values as we encounter narrowing typecasts, etc.  */
205
206struct vrp_element
207{
208  /* The highest and lowest values the variable in COND may contain when
209     COND is true.  Note this may not necessarily be the same values
210     tested by COND if the same variable was used in earlier conditionals.
211
212     Note this is computed lazily and thus can be NULL indicating that
213     the values have not been computed yet.  */
214  tree low;
215  tree high;
216
217  /* The actual conditional we recorded.  This is needed since we compute
218     ranges lazily.  */
219  tree cond;
220
221  /* The basic block where this record was created.  We use this to determine
222     when to remove records.  */
223  basic_block bb;
224};
225
226/* A hash table holding value range records (VRP_ELEMENTs) for a given
227   SSA_NAME.  We used to use a varray indexed by SSA_NAME_VERSION, but
228   that gets awful wasteful, particularly since the density objects
229   with useful information is very low.  */
230static htab_t vrp_data;
231
232typedef struct vrp_element *vrp_element_p;
233
234DEF_VEC_P(vrp_element_p);
235DEF_VEC_ALLOC_P(vrp_element_p,heap);
236
237/* An entry in the VRP_DATA hash table.  We record the variable and a
238   varray of VRP_ELEMENT records associated with that variable.  */
239struct vrp_hash_elt
240{
241  tree var;
242  VEC(vrp_element_p,heap) *records;
243};
244
245/* Array of variables which have their values constrained by operations
246   in this basic block.  We use this during finalization to know
247   which variables need their VRP data updated.  */
248
249/* Stack of SSA_NAMEs which had their values constrained by operations
250   in this basic block.  During finalization of this block we use this
251   list to determine which variables need their VRP data updated.
252
253   A NULL entry marks the end of the SSA_NAMEs associated with this block.  */
254static VEC(tree,heap) *vrp_variables_stack;
255
256struct eq_expr_value
257{
258  tree src;
259  tree dst;
260};
261
262/* Local functions.  */
263static void optimize_stmt (struct dom_walk_data *,
264			   basic_block bb,
265			   block_stmt_iterator);
266static tree lookup_avail_expr (tree, bool);
267static hashval_t vrp_hash (const void *);
268static int vrp_eq (const void *, const void *);
269static hashval_t avail_expr_hash (const void *);
270static hashval_t real_avail_expr_hash (const void *);
271static int avail_expr_eq (const void *, const void *);
272static void htab_statistics (FILE *, htab_t);
273static void record_cond (tree, tree);
274static void record_const_or_copy (tree, tree);
275static void record_equality (tree, tree);
276static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
277static tree simplify_rhs_and_lookup_avail_expr (tree, int);
278static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
279static tree simplify_switch_and_lookup_avail_expr (tree, int);
280static tree find_equivalent_equality_comparison (tree);
281static void record_range (tree, basic_block);
282static bool extract_range_from_cond (tree, tree *, tree *, int *);
283static void record_equivalences_from_phis (basic_block);
284static void record_equivalences_from_incoming_edge (basic_block);
285static bool eliminate_redundant_computations (tree, stmt_ann_t);
286static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
287static void thread_across_edge (struct dom_walk_data *, edge);
288static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
289static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
290static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
291static void remove_local_expressions_from_table (void);
292static void restore_vars_to_original_value (void);
293static edge single_incoming_edge_ignoring_loop_edges (basic_block);
294static void restore_nonzero_vars_to_original_value (void);
295static inline bool unsafe_associative_fp_binop (tree);
296
297
298/* Local version of fold that doesn't introduce cruft.  */
299
300static tree
301local_fold (tree t)
302{
303  t = fold (t);
304
305  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
306     may have been added by fold, and "useless" type conversions that might
307     now be apparent due to propagation.  */
308  STRIP_USELESS_TYPE_CONVERSION (t);
309
310  return t;
311}
312
313/* Allocate an EDGE_INFO for edge E and attach it to E.
314   Return the new EDGE_INFO structure.  */
315
316static struct edge_info *
317allocate_edge_info (edge e)
318{
319  struct edge_info *edge_info;
320
321  edge_info = xcalloc (1, sizeof (struct edge_info));
322
323  e->aux = edge_info;
324  return edge_info;
325}
326
327/* Free all EDGE_INFO structures associated with edges in the CFG.
328   If a particular edge can be threaded, copy the redirection
329   target from the EDGE_INFO structure into the edge's AUX field
330   as required by code to update the CFG and SSA graph for
331   jump threading.  */
332
333static void
334free_all_edge_infos (void)
335{
336  basic_block bb;
337  edge_iterator ei;
338  edge e;
339
340  FOR_EACH_BB (bb)
341    {
342      FOR_EACH_EDGE (e, ei, bb->preds)
343        {
344	 struct edge_info *edge_info = e->aux;
345
346	  if (edge_info)
347	    {
348	      e->aux = edge_info->redirection_target;
349	      if (edge_info->cond_equivalences)
350		free (edge_info->cond_equivalences);
351	      free (edge_info);
352	    }
353	}
354    }
355}
356
357/* Free an instance of vrp_hash_elt.  */
358
359static void
360vrp_free (void *data)
361{
362  struct vrp_hash_elt *elt = data;
363  struct VEC(vrp_element_p,heap) **vrp_elt = &elt->records;
364
365  VEC_free (vrp_element_p, heap, *vrp_elt);
366  free (elt);
367}
368
369/* Jump threading, redundancy elimination and const/copy propagation.
370
371   This pass may expose new symbols that need to be renamed into SSA.  For
372   every new symbol exposed, its corresponding bit will be set in
373   VARS_TO_RENAME.  */
374
375static void
376tree_ssa_dominator_optimize (void)
377{
378  struct dom_walk_data walk_data;
379  unsigned int i;
380  struct loops loops_info;
381
382  memset (&opt_stats, 0, sizeof (opt_stats));
383
384  /* Create our hash tables.  */
385  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
386  vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq,
387			  vrp_free);
388  avail_exprs_stack = VEC_alloc (tree, heap, 20);
389  const_and_copies_stack = VEC_alloc (tree, heap, 20);
390  nonzero_vars_stack = VEC_alloc (tree, heap, 20);
391  vrp_variables_stack = VEC_alloc (tree, heap, 20);
392  stmts_to_rescan = VEC_alloc (tree, heap, 20);
393  nonzero_vars = BITMAP_ALLOC (NULL);
394  threaded_blocks = BITMAP_ALLOC (NULL);
395  need_eh_cleanup = BITMAP_ALLOC (NULL);
396
397  /* Setup callbacks for the generic dominator tree walker.  */
398  walk_data.walk_stmts_backward = false;
399  walk_data.dom_direction = CDI_DOMINATORS;
400  walk_data.initialize_block_local_data = NULL;
401  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
402  walk_data.before_dom_children_walk_stmts = optimize_stmt;
403  walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
404  walk_data.after_dom_children_before_stmts = NULL;
405  walk_data.after_dom_children_walk_stmts = NULL;
406  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
407  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
408     When we attach more stuff we'll need to fill this out with a real
409     structure.  */
410  walk_data.global_data = NULL;
411  walk_data.block_local_data_size = 0;
412  walk_data.interesting_blocks = NULL;
413
414  /* Now initialize the dominator walker.  */
415  init_walk_dominator_tree (&walk_data);
416
417  calculate_dominance_info (CDI_DOMINATORS);
418
419  /* We need to know which edges exit loops so that we can
420     aggressively thread through loop headers to an exit
421     edge.  */
422  flow_loops_find (&loops_info);
423  mark_loop_exit_edges (&loops_info);
424  flow_loops_free (&loops_info);
425
426  /* Clean up the CFG so that any forwarder blocks created by loop
427     canonicalization are removed.  */
428  cleanup_tree_cfg ();
429  calculate_dominance_info (CDI_DOMINATORS);
430
431  /* If we prove certain blocks are unreachable, then we want to
432     repeat the dominator optimization process as PHI nodes may
433     have turned into copies which allows better propagation of
434     values.  So we repeat until we do not identify any new unreachable
435     blocks.  */
436  do
437    {
438      /* Optimize the dominator tree.  */
439      cfg_altered = false;
440
441      /* We need accurate information regarding back edges in the CFG
442	 for jump threading.  */
443      mark_dfs_back_edges ();
444
445      /* Recursively walk the dominator tree optimizing statements.  */
446      walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
447
448      {
449	block_stmt_iterator bsi;
450	basic_block bb;
451	FOR_EACH_BB (bb)
452	  {
453	    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
454	      {
455		update_stmt_if_modified (bsi_stmt (bsi));
456	      }
457	  }
458      }
459
460      /* If we exposed any new variables, go ahead and put them into
461	 SSA form now, before we handle jump threading.  This simplifies
462	 interactions between rewriting of _DECL nodes into SSA form
463	 and rewriting SSA_NAME nodes into SSA form after block
464	 duplication and CFG manipulation.  */
465      update_ssa (TODO_update_ssa);
466
467      free_all_edge_infos ();
468
469      /* Thread jumps, creating duplicate blocks as needed.  */
470      cfg_altered |= thread_through_all_blocks (threaded_blocks);
471
472      /* Removal of statements may make some EH edges dead.  Purge
473	 such edges from the CFG as needed.  */
474      if (!bitmap_empty_p (need_eh_cleanup))
475	{
476	  cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
477	  bitmap_zero (need_eh_cleanup);
478	}
479
480      if (cfg_altered)
481        free_dominance_info (CDI_DOMINATORS);
482
483      /* Only iterate if we threaded jumps AND the CFG cleanup did
484	 something interesting.  Other cases generate far fewer
485	 optimization opportunities and thus are not worth another
486	 full DOM iteration.  */
487      cfg_altered &= cleanup_tree_cfg ();
488
489      if (rediscover_loops_after_threading)
490	{
491	  /* Rerun basic loop analysis to discover any newly
492	     created loops and update the set of exit edges.  */
493	  rediscover_loops_after_threading = false;
494	  flow_loops_find (&loops_info);
495	  mark_loop_exit_edges (&loops_info);
496	  flow_loops_free (&loops_info);
497
498	  /* Remove any forwarder blocks inserted by loop
499	     header canonicalization.  */
500	  cleanup_tree_cfg ();
501	}
502
503      calculate_dominance_info (CDI_DOMINATORS);
504
505      update_ssa (TODO_update_ssa);
506
507      /* Reinitialize the various tables.  */
508      bitmap_clear (nonzero_vars);
509      bitmap_clear (threaded_blocks);
510      htab_empty (avail_exprs);
511      htab_empty (vrp_data);
512
513      /* Finally, remove everything except invariants in SSA_NAME_VALUE.
514
515	 This must be done before we iterate as we might have a
516	 reference to an SSA_NAME which was removed by the call to
517	 update_ssa.
518
519	 Long term we will be able to let everything in SSA_NAME_VALUE
520	 persist.  However, for now, we know this is the safe thing to do.  */
521      for (i = 0; i < num_ssa_names; i++)
522	{
523	  tree name = ssa_name (i);
524	  tree value;
525
526	  if (!name)
527	    continue;
528
529	  value = SSA_NAME_VALUE (name);
530	  if (value && !is_gimple_min_invariant (value))
531	    SSA_NAME_VALUE (name) = NULL;
532	}
533
534      opt_stats.num_iterations++;
535    }
536  while (optimize > 1 && cfg_altered);
537
538  /* Debugging dumps.  */
539  if (dump_file && (dump_flags & TDF_STATS))
540    dump_dominator_optimization_stats (dump_file);
541
542  /* We emptied the hash table earlier, now delete it completely.  */
543  htab_delete (avail_exprs);
544  htab_delete (vrp_data);
545
546  /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
547     CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
548     of the do-while loop above.  */
549
550  /* And finalize the dominator walker.  */
551  fini_walk_dominator_tree (&walk_data);
552
553  /* Free nonzero_vars.  */
554  BITMAP_FREE (nonzero_vars);
555  BITMAP_FREE (threaded_blocks);
556  BITMAP_FREE (need_eh_cleanup);
557
558  VEC_free (tree, heap, avail_exprs_stack);
559  VEC_free (tree, heap, const_and_copies_stack);
560  VEC_free (tree, heap, nonzero_vars_stack);
561  VEC_free (tree, heap, vrp_variables_stack);
562  VEC_free (tree, heap, stmts_to_rescan);
563}
564
565static bool
566gate_dominator (void)
567{
568  return flag_tree_dom != 0;
569}
570
571struct tree_opt_pass pass_dominator =
572{
573  "dom",				/* name */
574  gate_dominator,			/* gate */
575  tree_ssa_dominator_optimize,		/* execute */
576  NULL,					/* sub */
577  NULL,					/* next */
578  0,					/* static_pass_number */
579  TV_TREE_SSA_DOMINATOR_OPTS,		/* tv_id */
580  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
581  0,					/* properties_provided */
582  0,					/* properties_destroyed */
583  0,					/* todo_flags_start */
584  TODO_dump_func
585    | TODO_update_ssa
586    | TODO_verify_ssa,			/* todo_flags_finish */
587  0					/* letter */
588};
589
590
591/* We are exiting E->src, see if E->dest ends with a conditional
592   jump which has a known value when reached via E.
593
594   Special care is necessary if E is a back edge in the CFG as we
595   will have already recorded equivalences for E->dest into our
596   various tables, including the result of the conditional at
597   the end of E->dest.  Threading opportunities are severely
598   limited in that case to avoid short-circuiting the loop
599   incorrectly.
600
601   Note it is quite common for the first block inside a loop to
602   end with a conditional which is either always true or always
603   false when reached via the loop backedge.  Thus we do not want
604   to blindly disable threading across a loop backedge.  */
605
606static void
607thread_across_edge (struct dom_walk_data *walk_data, edge e)
608{
609  block_stmt_iterator bsi;
610  tree stmt = NULL;
611  tree phi;
612  int stmt_count = 0;
613  int max_stmt_count;
614
615
616  /* If E->dest does not end with a conditional, then there is
617     nothing to do.  */
618  bsi = bsi_last (e->dest);
619  if (bsi_end_p (bsi)
620      || ! bsi_stmt (bsi)
621      || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
622	  && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
623	  && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
624    return;
625
626  /* The basic idea here is to use whatever knowledge we have
627     from our dominator walk to simplify statements in E->dest,
628     with the ultimate goal being to simplify the conditional
629     at the end of E->dest.
630
631     Note that we must undo any changes we make to the underlying
632     statements as the simplifications we are making are control
633     flow sensitive (ie, the simplifications are valid when we
634     traverse E, but may not be valid on other paths to E->dest.  */
635
636  /* Each PHI creates a temporary equivalence, record them.  Again
637     these are context sensitive equivalences and will be removed
638     by our caller.  */
639  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
640    {
641      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
642      tree dst = PHI_RESULT (phi);
643
644      /* Do not include virtual PHIs in our statement count as
645	 they never generate code.  */
646      if (is_gimple_reg (dst))
647	stmt_count++;
648
649      /* If the desired argument is not the same as this PHI's result
650	 and it is set by a PHI in E->dest, then we can not thread
651	 through E->dest.  */
652      if (src != dst
653	  && TREE_CODE (src) == SSA_NAME
654	  && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
655	  && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
656	return;
657
658      record_const_or_copy (dst, src);
659    }
660
661  /* Try to simplify each statement in E->dest, ultimately leading to
662     a simplification of the COND_EXPR at the end of E->dest.
663
664     We might consider marking just those statements which ultimately
665     feed the COND_EXPR.  It's not clear if the overhead of bookkeeping
666     would be recovered by trying to simplify fewer statements.
667
668     If we are able to simplify a statement into the form
669     SSA_NAME = (SSA_NAME | gimple invariant), then we can record
670     a context sensitive equivalency which may help us simplify
671     later statements in E->dest.
672
673     Failure to simplify into the form above merely means that the
674     statement provides no equivalences to help simplify later
675     statements.  This does not prevent threading through E->dest.  */
676  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
677  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
678    {
679      tree cached_lhs = NULL;
680
681      stmt = bsi_stmt (bsi);
682
683      /* Ignore empty statements and labels.  */
684      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
685	continue;
686
687      /* If duplicating this block is going to cause too much code
688	 expansion, then do not thread through this block.  */
689      stmt_count++;
690      if (stmt_count > max_stmt_count)
691	return;
692
693      /* Safely handle threading across loop backedges.  This is
694	 over conservative, but still allows us to capture the
695	 majority of the cases where we can thread across a loop
696	 backedge.  */
697      if ((e->flags & EDGE_DFS_BACK) != 0
698	  && TREE_CODE (stmt) != COND_EXPR
699	  && TREE_CODE (stmt) != SWITCH_EXPR)
700	return;
701
702      /* If the statement has volatile operands, then we assume we
703	 can not thread through this block.  This is overly
704	 conservative in some ways.  */
705      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
706	return;
707
708      /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
709	 value, then do not try to simplify this statement as it will
710	 not simplify in any way that is helpful for jump threading.  */
711      if (TREE_CODE (stmt) != MODIFY_EXPR
712	  || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
713	continue;
714
715      /* At this point we have a statement which assigns an RHS to an
716	 SSA_VAR on the LHS.  We want to try and simplify this statement
717	 to expose more context sensitive equivalences which in turn may
718	 allow us to simplify the condition at the end of the loop.  */
719      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
720	cached_lhs = TREE_OPERAND (stmt, 1);
721      else
722	{
723	  /* Copy the operands.  */
724	  tree *copy, pre_fold_expr;
725	  ssa_op_iter iter;
726	  use_operand_p use_p;
727	  unsigned int num, i = 0;
728
729	  num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
730	  copy = xcalloc (num, sizeof (tree));
731
732	  /* Make a copy of the uses & vuses into USES_COPY, then cprop into
733	     the operands.  */
734	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
735	    {
736	      tree tmp = NULL;
737	      tree use = USE_FROM_PTR (use_p);
738
739	      copy[i++] = use;
740	      if (TREE_CODE (use) == SSA_NAME)
741		tmp = SSA_NAME_VALUE (use);
742	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
743		SET_USE (use_p, tmp);
744	    }
745
746	  /* Try to fold/lookup the new expression.  Inserting the
747	     expression into the hash table is unlikely to help
748	     Sadly, we have to handle conditional assignments specially
749	     here, because fold expects all the operands of an expression
750	     to be folded before the expression itself is folded, but we
751	     can't just substitute the folded condition here.  */
752	  if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
753	    {
754	      tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
755	      cond = fold (cond);
756	      if (cond == boolean_true_node)
757		pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
758	      else if (cond == boolean_false_node)
759		pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
760	      else
761		pre_fold_expr = TREE_OPERAND (stmt, 1);
762	    }
763	  else
764	    pre_fold_expr = TREE_OPERAND (stmt, 1);
765
766	  if (pre_fold_expr)
767	    {
768	      cached_lhs = fold (pre_fold_expr);
769	      if (TREE_CODE (cached_lhs) != SSA_NAME
770		  && !is_gimple_min_invariant (cached_lhs))
771	        cached_lhs = lookup_avail_expr (stmt, false);
772	    }
773
774	  /* Restore the statement's original uses/defs.  */
775	  i = 0;
776	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
777	    SET_USE (use_p, copy[i++]);
778
779	  free (copy);
780	}
781
782      /* Record the context sensitive equivalence if we were able
783	 to simplify this statement.  */
784      if (cached_lhs
785	  && (TREE_CODE (cached_lhs) == SSA_NAME
786	      || is_gimple_min_invariant (cached_lhs)))
787	record_const_or_copy (TREE_OPERAND (stmt, 0), cached_lhs);
788    }
789
790  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
791     will be taken.  */
792  if (stmt
793      && (TREE_CODE (stmt) == COND_EXPR
794	  || TREE_CODE (stmt) == GOTO_EXPR
795	  || TREE_CODE (stmt) == SWITCH_EXPR))
796    {
797      tree cond, cached_lhs;
798
799      /* Now temporarily cprop the operands and try to find the resulting
800	 expression in the hash tables.  */
801      if (TREE_CODE (stmt) == COND_EXPR)
802	cond = COND_EXPR_COND (stmt);
803      else if (TREE_CODE (stmt) == GOTO_EXPR)
804	cond = GOTO_DESTINATION (stmt);
805      else
806	cond = SWITCH_COND (stmt);
807
808      if (COMPARISON_CLASS_P (cond))
809	{
810	  tree dummy_cond, op0, op1;
811	  enum tree_code cond_code;
812
813	  op0 = TREE_OPERAND (cond, 0);
814	  op1 = TREE_OPERAND (cond, 1);
815	  cond_code = TREE_CODE (cond);
816
817	  /* Get the current value of both operands.  */
818	  if (TREE_CODE (op0) == SSA_NAME)
819	    {
820	      tree tmp = SSA_NAME_VALUE (op0);
821	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
822		op0 = tmp;
823	    }
824
825	  if (TREE_CODE (op1) == SSA_NAME)
826	    {
827	      tree tmp = SSA_NAME_VALUE (op1);
828	      if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
829		op1 = tmp;
830	    }
831
832	  /* Stuff the operator and operands into our dummy conditional
833	     expression, creating the dummy conditional if necessary.  */
834	  dummy_cond = walk_data->global_data;
835	  if (! dummy_cond)
836	    {
837	      dummy_cond = build (cond_code, boolean_type_node, op0, op1);
838	      dummy_cond = build (COND_EXPR, void_type_node,
839				  dummy_cond, NULL, NULL);
840	      walk_data->global_data = dummy_cond;
841	    }
842	  else
843	    {
844	      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
845	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
846	      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;
847	    }
848
849	  /* If the conditional folds to an invariant, then we are done,
850	     otherwise look it up in the hash tables.  */
851	  cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
852	  if (! is_gimple_min_invariant (cached_lhs))
853	    {
854	      cached_lhs = lookup_avail_expr (dummy_cond, false);
855	      if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
856		cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
857								  NULL,
858								  false);
859	    }
860	}
861      /* We can have conditionals which just test the state of a
862	 variable rather than use a relational operator.  These are
863	 simpler to handle.  */
864      else if (TREE_CODE (cond) == SSA_NAME)
865	{
866	  cached_lhs = cond;
867	  cached_lhs = SSA_NAME_VALUE (cached_lhs);
868	  if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
869	    cached_lhs = NULL;
870	}
871      else
872	cached_lhs = lookup_avail_expr (stmt, false);
873
874      if (cached_lhs)
875	{
876	  edge taken_edge = find_taken_edge (e->dest, cached_lhs);
877	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);
878
879	  if (dest == e->dest)
880	    return;
881
882	  /* If we have a known destination for the conditional, then
883	     we can perform this optimization, which saves at least one
884	     conditional jump each time it applies since we get to
885	     bypass the conditional at our original destination.  */
886	  if (dest)
887	    {
888	      struct edge_info *edge_info;
889
890	      if (e->aux)
891		edge_info = e->aux;
892	      else
893		edge_info = allocate_edge_info (e);
894	      edge_info->redirection_target = taken_edge;
895	      bitmap_set_bit (threaded_blocks, e->dest->index);
896	    }
897	}
898    }
899}
900
901
902/* Initialize local stacks for this optimizer and record equivalences
903   upon entry to BB.  Equivalences can come from the edge traversed to
904   reach BB or they may come from PHI nodes at the start of BB.  */
905
906static void
907dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
908			  basic_block bb)
909{
910  if (dump_file && (dump_flags & TDF_DETAILS))
911    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);
912
913  /* Push a marker on the stacks of local information so that we know how
914     far to unwind when we finalize this block.  */
915  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
916  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
917  VEC_safe_push (tree, heap, nonzero_vars_stack, NULL_TREE);
918  VEC_safe_push (tree, heap, vrp_variables_stack, NULL_TREE);
919
920  record_equivalences_from_incoming_edge (bb);
921
922  /* PHI nodes can create equivalences too.  */
923  record_equivalences_from_phis (bb);
924}
925
926/* Given an expression EXPR (a relational expression or a statement),
927   initialize the hash table element pointed to by ELEMENT.  */
928
929static void
930initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
931{
932  /* Hash table elements may be based on conditional expressions or statements.
933
934     For the former case, we have no annotation and we want to hash the
935     conditional expression.  In the latter case we have an annotation and
936     we want to record the expression the statement evaluates.  */
937  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
938    {
939      element->stmt = NULL;
940      element->rhs = expr;
941    }
942  else if (TREE_CODE (expr) == COND_EXPR)
943    {
944      element->stmt = expr;
945      element->rhs = COND_EXPR_COND (expr);
946    }
947  else if (TREE_CODE (expr) == SWITCH_EXPR)
948    {
949      element->stmt = expr;
950      element->rhs = SWITCH_COND (expr);
951    }
952  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
953    {
954      element->stmt = expr;
955      element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
956    }
957  else if (TREE_CODE (expr) == GOTO_EXPR)
958    {
959      element->stmt = expr;
960      element->rhs = GOTO_DESTINATION (expr);
961    }
962  else
963    {
964      element->stmt = expr;
965      element->rhs = TREE_OPERAND (expr, 1);
966    }
967
968  element->lhs = lhs;
969  element->hash = avail_expr_hash (element);
970}
971
972/* Remove all the expressions in LOCALS from TABLE, stopping when there are
973   LIMIT entries left in LOCALs.  */
974
975static void
976remove_local_expressions_from_table (void)
977{
978  /* Remove all the expressions made available in this block.  */
979  while (VEC_length (tree, avail_exprs_stack) > 0)
980    {
981      struct expr_hash_elt element;
982      tree expr = VEC_pop (tree, avail_exprs_stack);
983
984      if (expr == NULL_TREE)
985	break;
986
987      initialize_hash_element (expr, NULL, &element);
988      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
989    }
990}
991
992/* Use the SSA_NAMES in LOCALS to restore TABLE to its original
993   state, stopping when there are LIMIT entries left in LOCALs.  */
994
995static void
996restore_nonzero_vars_to_original_value (void)
997{
998  while (VEC_length (tree, nonzero_vars_stack) > 0)
999    {
1000      tree name = VEC_pop (tree, nonzero_vars_stack);
1001
1002      if (name == NULL)
1003	break;
1004
1005      bitmap_clear_bit (nonzero_vars, SSA_NAME_VERSION (name));
1006    }
1007}
1008
1009/* Use the source/dest pairs in CONST_AND_COPIES_STACK to restore
1010   CONST_AND_COPIES to its original state, stopping when we hit a
1011   NULL marker.  */
1012
1013static void
1014restore_vars_to_original_value (void)
1015{
1016  while (VEC_length (tree, const_and_copies_stack) > 0)
1017    {
1018      tree prev_value, dest;
1019
1020      dest = VEC_pop (tree, const_and_copies_stack);
1021
1022      if (dest == NULL)
1023	break;
1024
1025      prev_value = VEC_pop (tree, const_and_copies_stack);
1026      SSA_NAME_VALUE (dest) =  prev_value;
1027    }
1028}
1029
1030/* We have finished processing the dominator children of BB, perform
1031   any finalization actions in preparation for leaving this node in
1032   the dominator tree.  */
1033
1034static void
1035dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
1036{
1037  tree last;
1038
1039  /* If we have an outgoing edge to a block with multiple incoming and
1040     outgoing edges, then we may be able to thread the edge.  ie, we
1041     may be able to statically determine which of the outgoing edges
1042     will be traversed when the incoming edge from BB is traversed.  */
1043  if (single_succ_p (bb)
1044      && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
1045      && !single_pred_p (single_succ (bb))
1046      && !single_succ_p (single_succ (bb)))
1047
1048    {
1049      thread_across_edge (walk_data, single_succ_edge (bb));
1050    }
1051  else if ((last = last_stmt (bb))
1052	   && TREE_CODE (last) == COND_EXPR
1053	   && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
1054	       || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
1055	   && EDGE_COUNT (bb->succs) == 2
1056	   && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
1057	   && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
1058    {
1059      edge true_edge, false_edge;
1060
1061      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1062
1063      /* Only try to thread the edge if it reaches a target block with
1064	 more than one predecessor and more than one successor.  */
1065      if (!single_pred_p (true_edge->dest) && !single_succ_p (true_edge->dest))
1066	{
1067	  struct edge_info *edge_info;
1068	  unsigned int i;
1069
1070	  /* Push a marker onto the available expression stack so that we
1071	     unwind any expressions related to the TRUE arm before processing
1072	     the false arm below.  */
1073	  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
1074	  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
1075
1076	  edge_info = true_edge->aux;
1077
1078	  /* If we have info associated with this edge, record it into
1079	     our equivalency tables.  */
1080	  if (edge_info)
1081	    {
1082	      tree *cond_equivalences = edge_info->cond_equivalences;
1083	      tree lhs = edge_info->lhs;
1084	      tree rhs = edge_info->rhs;
1085
1086	      /* If we have a simple NAME = VALUE equivalency record it.  */
1087	      if (lhs && TREE_CODE (lhs) == SSA_NAME)
1088		record_const_or_copy (lhs, rhs);
1089
1090	      /* If we have 0 = COND or 1 = COND equivalences, record them
1091		 into our expression hash tables.  */
1092	      if (cond_equivalences)
1093		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
1094		  {
1095		    tree expr = cond_equivalences[i];
1096		    tree value = cond_equivalences[i + 1];
1097
1098		    record_cond (expr, value);
1099		  }
1100	    }
1101
1102	  /* Now thread the edge.  */
1103	  thread_across_edge (walk_data, true_edge);
1104
1105	  /* And restore the various tables to their state before
1106	     we threaded this edge.  */
1107	  remove_local_expressions_from_table ();
1108	  restore_vars_to_original_value ();
1109	}
1110
1111      /* Similarly for the ELSE arm.  */
1112      if (!single_pred_p (false_edge->dest) && !single_succ_p (false_edge->dest))
1113	{
1114	  struct edge_info *edge_info;
1115	  unsigned int i;
1116
1117	  edge_info = false_edge->aux;
1118
1119	  /* If we have info associated with this edge, record it into
1120	     our equivalency tables.  */
1121	  if (edge_info)
1122	    {
1123	      tree *cond_equivalences = edge_info->cond_equivalences;
1124	      tree lhs = edge_info->lhs;
1125	      tree rhs = edge_info->rhs;
1126
1127	      /* If we have a simple NAME = VALUE equivalency record it.  */
1128	      if (lhs && TREE_CODE (lhs) == SSA_NAME)
1129		record_const_or_copy (lhs, rhs);
1130
1131	      /* If we have 0 = COND or 1 = COND equivalences, record them
1132		 into our expression hash tables.  */
1133	      if (cond_equivalences)
1134		for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
1135		  {
1136		    tree expr = cond_equivalences[i];
1137		    tree value = cond_equivalences[i + 1];
1138
1139		    record_cond (expr, value);
1140		  }
1141	    }
1142
1143	  thread_across_edge (walk_data, false_edge);
1144
1145	  /* No need to remove local expressions from our tables
1146	     or restore vars to their original value as that will
1147	     be done immediately below.  */
1148	}
1149    }
1150
1151  remove_local_expressions_from_table ();
1152  restore_nonzero_vars_to_original_value ();
1153  restore_vars_to_original_value ();
1154
1155  /* Remove VRP records associated with this basic block.  They are no
1156     longer valid.
1157
1158     To be efficient, we note which variables have had their values
1159     constrained in this block.  So walk over each variable in the
1160     VRP_VARIABLEs array.  */
1161  while (VEC_length (tree, vrp_variables_stack) > 0)
1162    {
1163      tree var = VEC_pop (tree, vrp_variables_stack);
1164      struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
1165      void **slot;
1166
1167      /* Each variable has a stack of value range records.  We want to
1168	 invalidate those associated with our basic block.  So we walk
1169	 the array backwards popping off records associated with our
1170	 block.  Once we hit a record not associated with our block
1171	 we are done.  */
1172      VEC(vrp_element_p,heap) **var_vrp_records;
1173
1174      if (var == NULL)
1175	break;
1176
1177      vrp_hash_elt.var = var;
1178      vrp_hash_elt.records = NULL;
1179
1180      slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
1181
1182      vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
1183      var_vrp_records = &vrp_hash_elt_p->records;
1184
1185      while (VEC_length (vrp_element_p, *var_vrp_records) > 0)
1186	{
1187	  struct vrp_element *element
1188	    = VEC_last (vrp_element_p, *var_vrp_records);
1189
1190	  if (element->bb != bb)
1191	    break;
1192
1193	  VEC_pop (vrp_element_p, *var_vrp_records);
1194	}
1195    }
1196
1197  /* If we queued any statements to rescan in this block, then
1198     go ahead and rescan them now.  */
1199  while (VEC_length (tree, stmts_to_rescan) > 0)
1200    {
1201      tree stmt = VEC_last (tree, stmts_to_rescan);
1202      basic_block stmt_bb = bb_for_stmt (stmt);
1203
1204      if (stmt_bb != bb)
1205	break;
1206
1207      VEC_pop (tree, stmts_to_rescan);
1208      mark_new_vars_to_rename (stmt);
1209    }
1210}
1211
1212/* PHI nodes can create equivalences too.
1213
1214   Ignoring any alternatives which are the same as the result, if
1215   all the alternatives are equal, then the PHI node creates an
1216   equivalence.
1217
1218   Additionally, if all the PHI alternatives are known to have a nonzero
1219   value, then the result of this PHI is known to have a nonzero value,
1220   even if we do not know its exact value.  */
1221
1222static void
1223record_equivalences_from_phis (basic_block bb)
1224{
1225  tree phi;
1226
1227  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1228    {
1229      tree lhs = PHI_RESULT (phi);
1230      tree rhs = NULL;
1231      int i;
1232
1233      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1234	{
1235	  tree t = PHI_ARG_DEF (phi, i);
1236
1237	  /* Ignore alternatives which are the same as our LHS.  Since
1238	     LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
1239	     can simply compare pointers.  */
1240	  if (lhs == t)
1241	    continue;
1242
1243	  /* If we have not processed an alternative yet, then set
1244	     RHS to this alternative.  */
1245	  if (rhs == NULL)
1246	    rhs = t;
1247	  /* If we have processed an alternative (stored in RHS), then
1248	     see if it is equal to this one.  If it isn't, then stop
1249	     the search.  */
1250	  else if (! operand_equal_for_phi_arg_p (rhs, t))
1251	    break;
1252	}
1253
1254      /* If we had no interesting alternatives, then all the RHS alternatives
1255	 must have been the same as LHS.  */
1256      if (!rhs)
1257	rhs = lhs;
1258
1259      /* If we managed to iterate through each PHI alternative without
1260	 breaking out of the loop, then we have a PHI which may create
1261	 a useful equivalence.  We do not need to record unwind data for
1262	 this, since this is a true assignment and not an equivalence
1263	 inferred from a comparison.  All uses of this ssa name are dominated
1264	 by this assignment, so unwinding just costs time and space.  */
1265      if (i == PHI_NUM_ARGS (phi)
1266	  && may_propagate_copy (lhs, rhs))
1267	SSA_NAME_VALUE (lhs) = rhs;
1268
1269      /* Now see if we know anything about the nonzero property for the
1270	 result of this PHI.  */
1271      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1272	{
1273	  if (!PHI_ARG_NONZERO (phi, i))
1274	    break;
1275	}
1276
1277      if (i == PHI_NUM_ARGS (phi))
1278	bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));
1279    }
1280}
1281
1282/* Ignoring loop backedges, if BB has precisely one incoming edge then
1283   return that edge.  Otherwise return NULL.  */
1284static edge
1285single_incoming_edge_ignoring_loop_edges (basic_block bb)
1286{
1287  edge retval = NULL;
1288  edge e;
1289  edge_iterator ei;
1290
1291  FOR_EACH_EDGE (e, ei, bb->preds)
1292    {
1293      /* A loop back edge can be identified by the destination of
1294	 the edge dominating the source of the edge.  */
1295      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1296	continue;
1297
1298      /* If we have already seen a non-loop edge, then we must have
1299	 multiple incoming non-loop edges and thus we return NULL.  */
1300      if (retval)
1301	return NULL;
1302
1303      /* This is the first non-loop incoming edge we have found.  Record
1304	 it.  */
1305      retval = e;
1306    }
1307
1308  return retval;
1309}
1310
1311/* Record any equivalences created by the incoming edge to BB.  If BB
1312   has more than one incoming edge, then no equivalence is created.  */
1313
1314static void
1315record_equivalences_from_incoming_edge (basic_block bb)
1316{
1317  edge e;
1318  basic_block parent;
1319  struct edge_info *edge_info;
1320
1321  /* If our parent block ended with a control statement, then we may be
1322     able to record some equivalences based on which outgoing edge from
1323     the parent was followed.  */
1324  parent = get_immediate_dominator (CDI_DOMINATORS, bb);
1325
1326  e = single_incoming_edge_ignoring_loop_edges (bb);
1327
1328  /* If we had a single incoming edge from our parent block, then enter
1329     any data associated with the edge into our tables.  */
1330  if (e && e->src == parent)
1331    {
1332      unsigned int i;
1333
1334      edge_info = e->aux;
1335
1336      if (edge_info)
1337	{
1338	  tree lhs = edge_info->lhs;
1339	  tree rhs = edge_info->rhs;
1340	  tree *cond_equivalences = edge_info->cond_equivalences;
1341
1342	  if (lhs)
1343	    record_equality (lhs, rhs);
1344
1345	  if (cond_equivalences)
1346	    {
1347	      bool recorded_range = false;
1348	      for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
1349		{
1350		  tree expr = cond_equivalences[i];
1351		  tree value = cond_equivalences[i + 1];
1352
1353		  record_cond (expr, value);
1354
1355		  /* For the first true equivalence, record range
1356		     information.  We only do this for the first
1357		     true equivalence as it should dominate any
1358		     later true equivalences.  */
1359		  if (! recorded_range
1360		      && COMPARISON_CLASS_P (expr)
1361		      && value == boolean_true_node
1362		      && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
1363		    {
1364		      record_range (expr, bb);
1365		      recorded_range = true;
1366		    }
1367		}
1368	    }
1369	}
1370    }
1371}
1372
1373/* Dump SSA statistics on FILE.  */
1374
1375void
1376dump_dominator_optimization_stats (FILE *file)
1377{
1378  long n_exprs;
1379
1380  fprintf (file, "Total number of statements:                   %6ld\n\n",
1381	   opt_stats.num_stmts);
1382  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
1383           opt_stats.num_exprs_considered);
1384
1385  n_exprs = opt_stats.num_exprs_considered;
1386  if (n_exprs == 0)
1387    n_exprs = 1;
1388
1389  fprintf (file, "    Redundant expressions eliminated:         %6ld (%.0f%%)\n",
1390	   opt_stats.num_re, PERCENT (opt_stats.num_re,
1391				      n_exprs));
1392  fprintf (file, "    Constants propagated:                     %6ld\n",
1393	   opt_stats.num_const_prop);
1394  fprintf (file, "    Copies propagated:                        %6ld\n",
1395	   opt_stats.num_copy_prop);
1396
1397  fprintf (file, "\nTotal number of DOM iterations:             %6ld\n",
1398	   opt_stats.num_iterations);
1399
1400  fprintf (file, "\nHash table statistics:\n");
1401
1402  fprintf (file, "    avail_exprs: ");
1403  htab_statistics (file, avail_exprs);
1404}
1405
1406
1407/* Dump SSA statistics on stderr.  */
1408
1409void
1410debug_dominator_optimization_stats (void)
1411{
1412  dump_dominator_optimization_stats (stderr);
1413}
1414
1415
1416/* Dump statistics for the hash table HTAB.  */
1417
1418static void
1419htab_statistics (FILE *file, htab_t htab)
1420{
1421  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
1422	   (long) htab_size (htab),
1423	   (long) htab_elements (htab),
1424	   htab_collisions (htab));
1425}
1426
1427/* Record the fact that VAR has a nonzero value, though we may not know
1428   its exact value.  Note that if VAR is already known to have a nonzero
1429   value, then we do nothing.  */
1430
1431static void
1432record_var_is_nonzero (tree var)
1433{
1434  int indx = SSA_NAME_VERSION (var);
1435
1436  if (bitmap_bit_p (nonzero_vars, indx))
1437    return;
1438
1439  /* Mark it in the global table.  */
1440  bitmap_set_bit (nonzero_vars, indx);
1441
1442  /* Record this SSA_NAME so that we can reset the global table
1443     when we leave this block.  */
1444  VEC_safe_push (tree, heap, nonzero_vars_stack, var);
1445}
1446
1447/* Enter a statement into the true/false expression hash table indicating
1448   that the condition COND has the value VALUE.  */
1449
1450static void
1451record_cond (tree cond, tree value)
1452{
1453  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
1454  void **slot;
1455
1456  initialize_hash_element (cond, value, element);
1457
1458  slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
1459				   element->hash, INSERT);
1460  if (*slot == NULL)
1461    {
1462      *slot = (void *) element;
1463      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
1464    }
1465  else
1466    free (element);
1467}
1468
1469/* Build a new conditional using NEW_CODE, OP0 and OP1 and store
1470   the new conditional into *p, then store a boolean_true_node
1471   into *(p + 1).  */
1472
1473static void
1474build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
1475{
1476  *p = build2 (new_code, boolean_type_node, op0, op1);
1477  p++;
1478  *p = boolean_true_node;
1479}
1480
1481/* Record that COND is true and INVERTED is false into the edge information
1482   structure.  Also record that any conditions dominated by COND are true
1483   as well.
1484
1485   For example, if a < b is true, then a <= b must also be true.  */
1486
1487static void
1488record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
1489{
1490  tree op0, op1;
1491
1492  if (!COMPARISON_CLASS_P (cond))
1493    return;
1494
1495  op0 = TREE_OPERAND (cond, 0);
1496  op1 = TREE_OPERAND (cond, 1);
1497
1498  switch (TREE_CODE (cond))
1499    {
1500    case LT_EXPR:
1501    case GT_EXPR:
1502      edge_info->max_cond_equivalences = 12;
1503      edge_info->cond_equivalences = xmalloc (12 * sizeof (tree));
1504      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
1505				  ? LE_EXPR : GE_EXPR),
1506				 op0, op1, &edge_info->cond_equivalences[4]);
1507      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
1508				 &edge_info->cond_equivalences[6]);
1509      build_and_record_new_cond (NE_EXPR, op0, op1,
1510				 &edge_info->cond_equivalences[8]);
1511      build_and_record_new_cond (LTGT_EXPR, op0, op1,
1512				 &edge_info->cond_equivalences[10]);
1513      break;
1514
1515    case GE_EXPR:
1516    case LE_EXPR:
1517      edge_info->max_cond_equivalences = 6;
1518      edge_info->cond_equivalences = xmalloc (6 * sizeof (tree));
1519      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
1520				 &edge_info->cond_equivalences[4]);
1521      break;
1522
1523    case EQ_EXPR:
1524      edge_info->max_cond_equivalences = 10;
1525      edge_info->cond_equivalences = xmalloc (10 * sizeof (tree));
1526      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
1527				 &edge_info->cond_equivalences[4]);
1528      build_and_record_new_cond (LE_EXPR, op0, op1,
1529				 &edge_info->cond_equivalences[6]);
1530      build_and_record_new_cond (GE_EXPR, op0, op1,
1531				 &edge_info->cond_equivalences[8]);
1532      break;
1533
1534    case UNORDERED_EXPR:
1535      edge_info->max_cond_equivalences = 16;
1536      edge_info->cond_equivalences = xmalloc (16 * sizeof (tree));
1537      build_and_record_new_cond (NE_EXPR, op0, op1,
1538				 &edge_info->cond_equivalences[4]);
1539      build_and_record_new_cond (UNLE_EXPR, op0, op1,
1540				 &edge_info->cond_equivalences[6]);
1541      build_and_record_new_cond (UNGE_EXPR, op0, op1,
1542				 &edge_info->cond_equivalences[8]);
1543      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
1544				 &edge_info->cond_equivalences[10]);
1545      build_and_record_new_cond (UNLT_EXPR, op0, op1,
1546				 &edge_info->cond_equivalences[12]);
1547      build_and_record_new_cond (UNGT_EXPR, op0, op1,
1548				 &edge_info->cond_equivalences[14]);
1549      break;
1550
1551    case UNLT_EXPR:
1552    case UNGT_EXPR:
1553      edge_info->max_cond_equivalences = 8;
1554      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
1555      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
1556				  ? UNLE_EXPR : UNGE_EXPR),
1557				 op0, op1, &edge_info->cond_equivalences[4]);
1558      build_and_record_new_cond (NE_EXPR, op0, op1,
1559				 &edge_info->cond_equivalences[6]);
1560      break;
1561
1562    case UNEQ_EXPR:
1563      edge_info->max_cond_equivalences = 8;
1564      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
1565      build_and_record_new_cond (UNLE_EXPR, op0, op1,
1566				 &edge_info->cond_equivalences[4]);
1567      build_and_record_new_cond (UNGE_EXPR, op0, op1,
1568				 &edge_info->cond_equivalences[6]);
1569      break;
1570
1571    case LTGT_EXPR:
1572      edge_info->max_cond_equivalences = 8;
1573      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
1574      build_and_record_new_cond (NE_EXPR, op0, op1,
1575				 &edge_info->cond_equivalences[4]);
1576      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
1577				 &edge_info->cond_equivalences[6]);
1578      break;
1579
1580    default:
1581      edge_info->max_cond_equivalences = 4;
1582      edge_info->cond_equivalences = xmalloc (4 * sizeof (tree));
1583      break;
1584    }
1585
1586  /* Now store the original true and false conditions into the first
1587     two slots.  */
1588  edge_info->cond_equivalences[0] = cond;
1589  edge_info->cond_equivalences[1] = boolean_true_node;
1590  edge_info->cond_equivalences[2] = inverted;
1591  edge_info->cond_equivalences[3] = boolean_false_node;
1592}
1593
1594/* A helper function for record_const_or_copy and record_equality.
1595   Do the work of recording the value and undo info.  */
1596
1597static void
1598record_const_or_copy_1 (tree x, tree y, tree prev_x)
1599{
1600  SSA_NAME_VALUE (x) = y;
1601
1602  VEC_reserve (tree, heap, const_and_copies_stack, 2);
1603  VEC_quick_push (tree, const_and_copies_stack, prev_x);
1604  VEC_quick_push (tree, const_and_copies_stack, x);
1605}
1606
1607
1608/* Return the loop depth of the basic block of the defining statement of X.
1609   This number should not be treated as absolutely correct because the loop
1610   information may not be completely up-to-date when dom runs.  However, it
1611   will be relatively correct, and as more passes are taught to keep loop info
1612   up to date, the result will become more and more accurate.  */
1613
1614int
1615loop_depth_of_name (tree x)
1616{
1617  tree defstmt;
1618  basic_block defbb;
1619
1620  /* If it's not an SSA_NAME, we have no clue where the definition is.  */
1621  if (TREE_CODE (x) != SSA_NAME)
1622    return 0;
1623
1624  /* Otherwise return the loop depth of the defining statement's bb.
1625     Note that there may not actually be a bb for this statement, if the
1626     ssa_name is live on entry.  */
1627  defstmt = SSA_NAME_DEF_STMT (x);
1628  defbb = bb_for_stmt (defstmt);
1629  if (!defbb)
1630    return 0;
1631
1632  return defbb->loop_depth;
1633}
1634
1635
1636/* Record that X is equal to Y in const_and_copies.  Record undo
1637   information in the block-local vector.  */
1638
1639static void
1640record_const_or_copy (tree x, tree y)
1641{
1642  tree prev_x = SSA_NAME_VALUE (x);
1643
1644  if (TREE_CODE (y) == SSA_NAME)
1645    {
1646      tree tmp = SSA_NAME_VALUE (y);
1647      if (tmp)
1648	y = tmp;
1649    }
1650
1651  record_const_or_copy_1 (x, y, prev_x);
1652}
1653
1654/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
1655   This constrains the cases in which we may treat this as assignment.  */
1656
1657static void
1658record_equality (tree x, tree y)
1659{
1660  tree prev_x = NULL, prev_y = NULL;
1661
1662  if (TREE_CODE (x) == SSA_NAME)
1663    prev_x = SSA_NAME_VALUE (x);
1664  if (TREE_CODE (y) == SSA_NAME)
1665    prev_y = SSA_NAME_VALUE (y);
1666
1667  /* If one of the previous values is invariant, or invariant in more loops
1668     (by depth), then use that.
1669     Otherwise it doesn't matter which value we choose, just so
1670     long as we canonicalize on one value.  */
1671  if (TREE_INVARIANT (y))
1672    ;
1673  else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
1674    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
1675  else if (prev_x && TREE_INVARIANT (prev_x))
1676    x = y, y = prev_x, prev_x = prev_y;
1677  else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
1678    y = prev_y;
1679
1680  /* After the swapping, we must have one SSA_NAME.  */
1681  if (TREE_CODE (x) != SSA_NAME)
1682    return;
1683
1684  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
1685     variable compared against zero.  If we're honoring signed zeros,
1686     then we cannot record this value unless we know that the value is
1687     nonzero.  */
1688  if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x)))
1689      && (TREE_CODE (y) != REAL_CST
1690	  || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
1691    return;
1692
1693  record_const_or_copy_1 (x, y, prev_x);
1694}
1695
1696/* Return true, if it is ok to do folding of an associative expression.
1697   EXP is the tree for the associative expression.  */
1698
1699static inline bool
1700unsafe_associative_fp_binop (tree exp)
1701{
1702  enum tree_code code = TREE_CODE (exp);
1703  return !(!flag_unsafe_math_optimizations
1704           && (code == MULT_EXPR || code == PLUS_EXPR
1705	       || code == MINUS_EXPR)
1706           && FLOAT_TYPE_P (TREE_TYPE (exp)));
1707}
1708
1709/* Returns true when STMT is a simple iv increment.  It detects the
1710   following situation:
1711
1712   i_1 = phi (..., i_2)
1713   i_2 = i_1 +/- ...  */
1714
1715static bool
1716simple_iv_increment_p (tree stmt)
1717{
1718  tree lhs, rhs, preinc, phi;
1719  unsigned i;
1720
1721  if (TREE_CODE (stmt) != MODIFY_EXPR)
1722    return false;
1723
1724  lhs = TREE_OPERAND (stmt, 0);
1725  if (TREE_CODE (lhs) != SSA_NAME)
1726    return false;
1727
1728  rhs = TREE_OPERAND (stmt, 1);
1729
1730  if (TREE_CODE (rhs) != PLUS_EXPR
1731      && TREE_CODE (rhs) != MINUS_EXPR)
1732    return false;
1733
1734  preinc = TREE_OPERAND (rhs, 0);
1735  if (TREE_CODE (preinc) != SSA_NAME)
1736    return false;
1737
1738  phi = SSA_NAME_DEF_STMT (preinc);
1739  if (TREE_CODE (phi) != PHI_NODE)
1740    return false;
1741
1742  for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
1743    if (PHI_ARG_DEF (phi, i) == lhs)
1744      return true;
1745
1746  return false;
1747}
1748
1749/* STMT is a MODIFY_EXPR for which we were unable to find RHS in the
1750   hash tables.  Try to simplify the RHS using whatever equivalences
1751   we may have recorded.
1752
1753   If we are able to simplify the RHS, then lookup the simplified form in
1754   the hash table and return the result.  Otherwise return NULL.  */
1755
1756static tree
1757simplify_rhs_and_lookup_avail_expr (tree stmt, int insert)
1758{
1759  tree rhs = TREE_OPERAND (stmt, 1);
1760  enum tree_code rhs_code = TREE_CODE (rhs);
1761  tree result = NULL;
1762
1763  /* If we have lhs = ~x, look and see if we earlier had x = ~y.
1764     In which case we can change this statement to be lhs = y.
1765     Which can then be copy propagated.
1766
1767     Similarly for negation.  */
1768  if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
1769      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1770    {
1771      /* Get the definition statement for our RHS.  */
1772      tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1773
1774      /* See if the RHS_DEF_STMT has the same form as our statement.  */
1775      if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
1776	  && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code)
1777	{
1778	  tree rhs_def_operand;
1779
1780	  rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);
1781
1782	  /* Verify that RHS_DEF_OPERAND is a suitable SSA variable.  */
1783	  if (TREE_CODE (rhs_def_operand) == SSA_NAME
1784	      && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1785	    result = update_rhs_and_lookup_avail_expr (stmt,
1786						       rhs_def_operand,
1787						       insert);
1788	}
1789    }
1790
1791  /* If we have z = (x OP C1), see if we earlier had x = y OP C2.
1792     If OP is associative, create and fold (y OP C2) OP C1 which
1793     should result in (y OP C3), use that as the RHS for the
1794     assignment.  Add minus to this, as we handle it specially below.  */
1795  if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
1796      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
1797      && has_single_use (TREE_OPERAND (rhs, 0))
1798      && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
1799    {
1800      tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
1801
1802      /* If the statement defines an induction variable, do not propagate
1803	 its value, so that we do not create overlapping life ranges.  */
1804      if (simple_iv_increment_p (rhs_def_stmt))
1805	goto dont_fold_assoc;
1806
1807      /* See if the RHS_DEF_STMT has the same form as our statement.  */
1808      if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR)
1809	{
1810	  tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
1811	  enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);
1812
1813	  if ((rhs_code == rhs_def_code && unsafe_associative_fp_binop (rhs))
1814	      || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
1815	      || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
1816	    {
1817	      tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
1818	      tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);
1819
1820	      if (TREE_CODE (def_stmt_op0) == SSA_NAME
1821		  && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
1822		  && is_gimple_min_invariant (def_stmt_op1))
1823		{
1824		  tree outer_const = TREE_OPERAND (rhs, 1);
1825		  tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
1826		  tree t;
1827
1828		  /* If we care about correct floating point results, then
1829		     don't fold x + c1 - c2.  Note that we need to take both
1830		     the codes and the signs to figure this out.  */
1831		  if (FLOAT_TYPE_P (type)
1832		      && !flag_unsafe_math_optimizations
1833		      && (rhs_def_code == PLUS_EXPR
1834			  || rhs_def_code == MINUS_EXPR))
1835		    {
1836		      bool neg = false;
1837
1838		      neg ^= (rhs_code == MINUS_EXPR);
1839		      neg ^= (rhs_def_code == MINUS_EXPR);
1840		      neg ^= real_isneg (TREE_REAL_CST_PTR (outer_const));
1841		      neg ^= real_isneg (TREE_REAL_CST_PTR (def_stmt_op1));
1842
1843		      if (neg)
1844			goto dont_fold_assoc;
1845		    }
1846
1847		  /* Ho hum.  So fold will only operate on the outermost
1848		     thingy that we give it, so we have to build the new
1849		     expression in two pieces.  This requires that we handle
1850		     combinations of plus and minus.  */
1851		  if (rhs_def_code != rhs_code)
1852		    {
1853		      if (rhs_def_code == MINUS_EXPR)
1854		        t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
1855		      else
1856		        t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
1857		      rhs_code = PLUS_EXPR;
1858		    }
1859		  else if (rhs_def_code == MINUS_EXPR)
1860		    t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
1861		  else
1862		    t = build (rhs_def_code, type, def_stmt_op1, outer_const);
1863		  t = local_fold (t);
1864		  t = build (rhs_code, type, def_stmt_op0, t);
1865		  t = local_fold (t);
1866
1867		  /* If the result is a suitable looking gimple expression,
1868		     then use it instead of the original for STMT.  */
1869		  if (TREE_CODE (t) == SSA_NAME
1870		      || (UNARY_CLASS_P (t)
1871			  && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1872		      || ((BINARY_CLASS_P (t) || COMPARISON_CLASS_P (t))
1873			  && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
1874			  && is_gimple_val (TREE_OPERAND (t, 1))))
1875		    result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1876		}
1877	    }
1878	}
1879 dont_fold_assoc:;
1880    }
1881
1882  /* Optimize *"foo" into 'f'.  This is done here rather than
1883     in fold to avoid problems with stuff like &*"foo".  */
1884  if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
1885    {
1886      tree t = fold_read_from_constant_string (rhs);
1887
1888      if (t)
1889        result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
1890    }
1891
1892  return result;
1893}
1894
1895/* COND is a condition of the form:
1896
1897     x == const or x != const
1898
1899   Look back to x's defining statement and see if x is defined as
1900
1901     x = (type) y;
1902
1903   If const is unchanged if we convert it to type, then we can build
1904   the equivalent expression:
1905
1906
1907      y == const or y != const
1908
1909   Which may allow further optimizations.
1910
1911   Return the equivalent comparison or NULL if no such equivalent comparison
1912   was found.  */
1913
1914static tree
1915find_equivalent_equality_comparison (tree cond)
1916{
1917  tree op0 = TREE_OPERAND (cond, 0);
1918  tree op1 = TREE_OPERAND (cond, 1);
1919  tree def_stmt = SSA_NAME_DEF_STMT (op0);
1920
1921  /* OP0 might have been a parameter, so first make sure it
1922     was defined by a MODIFY_EXPR.  */
1923  if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR)
1924    {
1925      tree def_rhs = TREE_OPERAND (def_stmt, 1);
1926
1927
1928      /* If either operand to the comparison is a pointer to
1929	 a function, then we can not apply this optimization
1930	 as some targets require function pointers to be
1931	 canonicalized and in this case this optimization would
1932	 eliminate a necessary canonicalization.  */
1933      if ((POINTER_TYPE_P (TREE_TYPE (op0))
1934	   && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
1935	  || (POINTER_TYPE_P (TREE_TYPE (op1))
1936	      && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
1937	return NULL;
1938
1939      /* Now make sure the RHS of the MODIFY_EXPR is a typecast.  */
1940      if ((TREE_CODE (def_rhs) == NOP_EXPR
1941	   || TREE_CODE (def_rhs) == CONVERT_EXPR)
1942	  && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME)
1943	{
1944	  tree def_rhs_inner = TREE_OPERAND (def_rhs, 0);
1945	  tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner);
1946	  tree new;
1947
1948	  if (TYPE_PRECISION (def_rhs_inner_type)
1949	      > TYPE_PRECISION (TREE_TYPE (def_rhs)))
1950	    return NULL;
1951
1952	  /* If the inner type of the conversion is a pointer to
1953	     a function, then we can not apply this optimization
1954	     as some targets require function pointers to be
1955	     canonicalized.  This optimization would result in
1956	     canonicalization of the pointer when it was not originally
1957	     needed/intended.  */
1958	  if (POINTER_TYPE_P (def_rhs_inner_type)
1959	      && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
1960	    return NULL;
1961
1962	  /* What we want to prove is that if we convert OP1 to
1963	     the type of the object inside the NOP_EXPR that the
1964	     result is still equivalent to SRC.
1965
1966	     If that is true, the build and return new equivalent
1967	     condition which uses the source of the typecast and the
1968	     new constant (which has only changed its type).  */
1969	  new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1);
1970	  new = local_fold (new);
1971	  if (is_gimple_val (new) && tree_int_cst_equal (new, op1))
1972	    return build (TREE_CODE (cond), TREE_TYPE (cond),
1973			  def_rhs_inner, new);
1974	}
1975    }
1976  return NULL;
1977}
1978
1979/* STMT is a COND_EXPR for which we could not trivially determine its
1980   result.  This routine attempts to find equivalent forms of the
1981   condition which we may be able to optimize better.  It also
1982   uses simple value range propagation to optimize conditionals.  */
1983
1984static tree
1985simplify_cond_and_lookup_avail_expr (tree stmt,
1986				     stmt_ann_t ann,
1987				     int insert)
1988{
1989  tree cond = COND_EXPR_COND (stmt);
1990
1991  if (COMPARISON_CLASS_P (cond))
1992    {
1993      tree op0 = TREE_OPERAND (cond, 0);
1994      tree op1 = TREE_OPERAND (cond, 1);
1995
1996      if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
1997	{
1998	  int limit;
1999	  tree low, high, cond_low, cond_high;
2000	  int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
2001	  VEC(vrp_element_p,heap) **vrp_records;
2002	  struct vrp_element *element;
2003	  struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
2004	  void **slot;
2005
2006	  /* First see if we have test of an SSA_NAME against a constant
2007	     where the SSA_NAME is defined by an earlier typecast which
2008	     is irrelevant when performing tests against the given
2009	     constant.  */
2010	  if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
2011	    {
2012	      tree new_cond = find_equivalent_equality_comparison (cond);
2013
2014	      if (new_cond)
2015		{
2016		  /* Update the statement to use the new equivalent
2017		     condition.  */
2018		  COND_EXPR_COND (stmt) = new_cond;
2019
2020		  /* If this is not a real stmt, ann will be NULL and we
2021		     avoid processing the operands.  */
2022		  if (ann)
2023		    mark_stmt_modified (stmt);
2024
2025		  /* Lookup the condition and return its known value if it
2026		     exists.  */
2027		  new_cond = lookup_avail_expr (stmt, insert);
2028		  if (new_cond)
2029		    return new_cond;
2030
2031		  /* The operands have changed, so update op0 and op1.  */
2032		  op0 = TREE_OPERAND (cond, 0);
2033		  op1 = TREE_OPERAND (cond, 1);
2034		}
2035	    }
2036
2037	  /* Consult the value range records for this variable (if they exist)
2038	     to see if we can eliminate or simplify this conditional.
2039
2040	     Note two tests are necessary to determine no records exist.
2041	     First we have to see if the virtual array exists, if it
2042	     exists, then we have to check its active size.
2043
2044	     Also note the vast majority of conditionals are not testing
2045	     a variable which has had its range constrained by an earlier
2046	     conditional.  So this filter avoids a lot of unnecessary work.  */
2047	  vrp_hash_elt.var = op0;
2048	  vrp_hash_elt.records = NULL;
2049          slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
2050          if (slot == NULL)
2051	    return NULL;
2052
2053	  vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
2054	  vrp_records = &vrp_hash_elt_p->records;
2055
2056	  limit = VEC_length (vrp_element_p, *vrp_records);
2057
2058	  /* If we have no value range records for this variable, or we are
2059	     unable to extract a range for this condition, then there is
2060	     nothing to do.  */
2061	  if (limit == 0
2062	      || ! extract_range_from_cond (cond, &cond_high,
2063					    &cond_low, &cond_inverted))
2064	    return NULL;
2065
2066	  /* We really want to avoid unnecessary computations of range
2067	     info.  So all ranges are computed lazily; this avoids a
2068	     lot of unnecessary work.  i.e., we record the conditional,
2069	     but do not process how it constrains the variable's
2070	     potential values until we know that processing the condition
2071	     could be helpful.
2072
2073	     However, we do not want to have to walk a potentially long
2074	     list of ranges, nor do we want to compute a variable's
2075	     range more than once for a given path.
2076
2077	     Luckily, each time we encounter a conditional that can not
2078	     be otherwise optimized we will end up here and we will
2079	     compute the necessary range information for the variable
2080	     used in this condition.
2081
2082	     Thus you can conclude that there will never be more than one
2083	     conditional associated with a variable which has not been
2084	     processed.  So we never need to merge more than one new
2085	     conditional into the current range.
2086
2087	     These properties also help us avoid unnecessary work.  */
2088	   element = VEC_last (vrp_element_p, *vrp_records);
2089
2090	  if (element->high && element->low)
2091	    {
2092	      /* The last element has been processed, so there is no range
2093		 merging to do, we can simply use the high/low values
2094		 recorded in the last element.  */
2095	      low = element->low;
2096	      high = element->high;
2097	    }
2098	  else
2099	    {
2100	      tree tmp_high, tmp_low;
2101	      int dummy;
2102
2103	      /* The last element has not been processed.  Process it now.
2104		 record_range should ensure for cond inverted is not set.
2105		 This call can only fail if cond is x < min or x > max,
2106		 which fold should have optimized into false.
2107		 If that doesn't happen, just pretend all values are
2108		 in the range.  */
2109	      if (! extract_range_from_cond (element->cond, &tmp_high,
2110					     &tmp_low, &dummy))
2111		gcc_unreachable ();
2112	      else
2113		gcc_assert (dummy == 0);
2114
2115	      /* If this is the only element, then no merging is necessary,
2116		 the high/low values from extract_range_from_cond are all
2117		 we need.  */
2118	      if (limit == 1)
2119		{
2120		  low = tmp_low;
2121		  high = tmp_high;
2122		}
2123	      else
2124		{
2125		  /* Get the high/low value from the previous element.  */
2126		  struct vrp_element *prev
2127		    = VEC_index (vrp_element_p, *vrp_records, limit - 2);
2128		  low = prev->low;
2129		  high = prev->high;
2130
2131		  /* Merge in this element's range with the range from the
2132		     previous element.
2133
2134		     The low value for the merged range is the maximum of
2135		     the previous low value and the low value of this record.
2136
2137		     Similarly the high value for the merged range is the
2138		     minimum of the previous high value and the high value of
2139		     this record.  */
2140		  low = (low && tree_int_cst_compare (low, tmp_low) == 1
2141			 ? low : tmp_low);
2142		  high = (high && tree_int_cst_compare (high, tmp_high) == -1
2143			  ? high : tmp_high);
2144		}
2145
2146	      /* And record the computed range.  */
2147	      element->low = low;
2148	      element->high = high;
2149
2150	    }
2151
2152	  /* After we have constrained this variable's potential values,
2153	     we try to determine the result of the given conditional.
2154
2155	     To simplify later tests, first determine if the current
2156	     low value is the same low value as the conditional.
2157	     Similarly for the current high value and the high value
2158	     for the conditional.  */
2159	  lowequal = tree_int_cst_equal (low, cond_low);
2160	  highequal = tree_int_cst_equal (high, cond_high);
2161
2162	  if (lowequal && highequal)
2163	    return (cond_inverted ? boolean_false_node : boolean_true_node);
2164
2165	  /* To simplify the overlap/subset tests below we may want
2166	     to swap the two ranges so that the larger of the two
2167	     ranges occurs "first".  */
2168	  swapped = 0;
2169	  if (tree_int_cst_compare (low, cond_low) == 1
2170	      || (lowequal
2171		  && tree_int_cst_compare (cond_high, high) == 1))
2172	    {
2173	      tree temp;
2174
2175	      swapped = 1;
2176	      temp = low;
2177	      low = cond_low;
2178	      cond_low = temp;
2179	      temp = high;
2180	      high = cond_high;
2181	      cond_high = temp;
2182	    }
2183
2184	  /* Now determine if there is no overlap in the ranges
2185	     or if the second range is a subset of the first range.  */
2186	  no_overlap = tree_int_cst_lt (high, cond_low);
2187	  subset = tree_int_cst_compare (cond_high, high) != 1;
2188
2189	  /* If there was no overlap in the ranges, then this conditional
2190	     always has a false value (unless we had to invert this
2191	     conditional, in which case it always has a true value).  */
2192	  if (no_overlap)
2193	    return (cond_inverted ? boolean_true_node : boolean_false_node);
2194
2195	  /* If the current range is a subset of the condition's range,
2196	     then this conditional always has a true value (unless we
2197	     had to invert this conditional, in which case it always
2198	     has a true value).  */
2199	  if (subset && swapped)
2200	    return (cond_inverted ? boolean_false_node : boolean_true_node);
2201
2202	  /* We were unable to determine the result of the conditional.
2203	     However, we may be able to simplify the conditional.  First
2204	     merge the ranges in the same manner as range merging above.  */
2205	  low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
2206	  high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;
2207
2208	  /* If the range has converged to a single point, then turn this
2209	     into an equality comparison.  */
2210	  if (TREE_CODE (cond) != EQ_EXPR
2211	      && TREE_CODE (cond) != NE_EXPR
2212	      && tree_int_cst_equal (low, high))
2213	    {
2214	      TREE_SET_CODE (cond, EQ_EXPR);
2215	      TREE_OPERAND (cond, 1) = high;
2216	    }
2217	}
2218    }
2219  return 0;
2220}
2221
2222/* STMT is a SWITCH_EXPR for which we could not trivially determine its
2223   result.  This routine attempts to find equivalent forms of the
2224   condition which we may be able to optimize better.  */
2225
2226static tree
2227simplify_switch_and_lookup_avail_expr (tree stmt, int insert)
2228{
2229  tree cond = SWITCH_COND (stmt);
2230  tree def, to, ti;
2231
2232  /* The optimization that we really care about is removing unnecessary
2233     casts.  That will let us do much better in propagating the inferred
2234     constant at the switch target.  */
2235  if (TREE_CODE (cond) == SSA_NAME)
2236    {
2237      def = SSA_NAME_DEF_STMT (cond);
2238      if (TREE_CODE (def) == MODIFY_EXPR)
2239	{
2240	  def = TREE_OPERAND (def, 1);
2241	  if (TREE_CODE (def) == NOP_EXPR)
2242	    {
2243	      int need_precision;
2244	      bool fail;
2245
2246	      def = TREE_OPERAND (def, 0);
2247
2248#ifdef ENABLE_CHECKING
2249	      /* ??? Why was Jeff testing this?  We are gimple...  */
2250	      gcc_assert (is_gimple_val (def));
2251#endif
2252
2253	      to = TREE_TYPE (cond);
2254	      ti = TREE_TYPE (def);
2255
2256	      /* If we have an extension that preserves value, then we
2257		 can copy the source value into the switch.  */
2258
2259	      need_precision = TYPE_PRECISION (ti);
2260	      fail = false;
2261	      if (! INTEGRAL_TYPE_P (ti))
2262		fail = true;
2263	      else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
2264		fail = true;
2265	      else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
2266		need_precision += 1;
2267	      if (TYPE_PRECISION (to) < need_precision)
2268		fail = true;
2269
2270	      if (!fail)
2271		{
2272		  SWITCH_COND (stmt) = def;
2273		  mark_stmt_modified (stmt);
2274
2275		  return lookup_avail_expr (stmt, insert);
2276		}
2277	    }
2278	}
2279    }
2280
2281  return 0;
2282}
2283
2284
2285/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2286   known value for that SSA_NAME (or NULL if no value is known).
2287
2288   NONZERO_VARS is the set SSA_NAMES known to have a nonzero value,
2289   even if we don't know their precise value.
2290
2291   Propagate values from CONST_AND_COPIES and NONZERO_VARS into the PHI
2292   nodes of the successors of BB.  */
2293
2294static void
2295cprop_into_successor_phis (basic_block bb, bitmap nonzero_vars)
2296{
2297  edge e;
2298  edge_iterator ei;
2299
2300  FOR_EACH_EDGE (e, ei, bb->succs)
2301    {
2302      tree phi;
2303      int indx;
2304
2305      /* If this is an abnormal edge, then we do not want to copy propagate
2306	 into the PHI alternative associated with this edge.  */
2307      if (e->flags & EDGE_ABNORMAL)
2308	continue;
2309
2310      phi = phi_nodes (e->dest);
2311      if (! phi)
2312	continue;
2313
2314      indx = e->dest_idx;
2315      for ( ; phi; phi = PHI_CHAIN (phi))
2316	{
2317	  tree new;
2318	  use_operand_p orig_p;
2319	  tree orig;
2320
2321	  /* The alternative may be associated with a constant, so verify
2322	     it is an SSA_NAME before doing anything with it.  */
2323	  orig_p = PHI_ARG_DEF_PTR (phi, indx);
2324	  orig = USE_FROM_PTR (orig_p);
2325	  if (TREE_CODE (orig) != SSA_NAME)
2326	    continue;
2327
2328	  /* If the alternative is known to have a nonzero value, record
2329	     that fact in the PHI node itself for future use.  */
2330	  if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
2331	    PHI_ARG_NONZERO (phi, indx) = true;
2332
2333	  /* If we have *ORIG_P in our constant/copy table, then replace
2334	     ORIG_P with its value in our constant/copy table.  */
2335	  new = SSA_NAME_VALUE (orig);
2336	  if (new
2337	      && new != orig
2338	      && (TREE_CODE (new) == SSA_NAME
2339		  || is_gimple_min_invariant (new))
2340	      && may_propagate_copy (orig, new))
2341	    propagate_value (orig_p, new);
2342	}
2343    }
2344}
2345
2346/* We have finished optimizing BB, record any information implied by
2347   taking a specific outgoing edge from BB.  */
2348
2349static void
2350record_edge_info (basic_block bb)
2351{
2352  block_stmt_iterator bsi = bsi_last (bb);
2353  struct edge_info *edge_info;
2354
2355  if (! bsi_end_p (bsi))
2356    {
2357      tree stmt = bsi_stmt (bsi);
2358
2359      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
2360	{
2361	  tree cond = SWITCH_COND (stmt);
2362
2363	  if (TREE_CODE (cond) == SSA_NAME)
2364	    {
2365	      tree labels = SWITCH_LABELS (stmt);
2366	      int i, n_labels = TREE_VEC_LENGTH (labels);
2367	      tree *info = xcalloc (last_basic_block, sizeof (tree));
2368	      edge e;
2369	      edge_iterator ei;
2370
2371	      for (i = 0; i < n_labels; i++)
2372		{
2373		  tree label = TREE_VEC_ELT (labels, i);
2374		  basic_block target_bb = label_to_block (CASE_LABEL (label));
2375
2376		  if (CASE_HIGH (label)
2377		      || !CASE_LOW (label)
2378		      || info[target_bb->index])
2379		    info[target_bb->index] = error_mark_node;
2380		  else
2381		    info[target_bb->index] = label;
2382		}
2383
2384	      FOR_EACH_EDGE (e, ei, bb->succs)
2385		{
2386		  basic_block target_bb = e->dest;
2387		  tree node = info[target_bb->index];
2388
2389		  if (node != NULL && node != error_mark_node)
2390		    {
2391		      tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
2392		      edge_info = allocate_edge_info (e);
2393		      edge_info->lhs = cond;
2394		      edge_info->rhs = x;
2395		    }
2396		}
2397	      free (info);
2398	    }
2399	}
2400
2401      /* A COND_EXPR may create equivalences too.  */
2402      if (stmt && TREE_CODE (stmt) == COND_EXPR)
2403	{
2404	  tree cond = COND_EXPR_COND (stmt);
2405	  edge true_edge;
2406	  edge false_edge;
2407
2408	  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2409
2410	  /* If the conditional is a single variable 'X', record 'X = 1'
2411	     for the true edge and 'X = 0' on the false edge.  */
2412	  if (SSA_VAR_P (cond))
2413	    {
2414	      struct edge_info *edge_info;
2415
2416	      edge_info = allocate_edge_info (true_edge);
2417	      edge_info->lhs = cond;
2418	      edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));
2419
2420	      edge_info = allocate_edge_info (false_edge);
2421	      edge_info->lhs = cond;
2422	      edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
2423	    }
2424	  /* Equality tests may create one or two equivalences.  */
2425	  else if (COMPARISON_CLASS_P (cond))
2426	    {
2427	      tree op0 = TREE_OPERAND (cond, 0);
2428	      tree op1 = TREE_OPERAND (cond, 1);
2429
2430	      /* Special case comparing booleans against a constant as we
2431		 know the value of OP0 on both arms of the branch.  i.e., we
2432		 can record an equivalence for OP0 rather than COND.  */
2433	      if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
2434		  && TREE_CODE (op0) == SSA_NAME
2435		  && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2436		  && is_gimple_min_invariant (op1))
2437		{
2438		  if (TREE_CODE (cond) == EQ_EXPR)
2439		    {
2440		      edge_info = allocate_edge_info (true_edge);
2441		      edge_info->lhs = op0;
2442		      edge_info->rhs = (integer_zerop (op1)
2443					    ? boolean_false_node
2444					    : boolean_true_node);
2445
2446		      edge_info = allocate_edge_info (false_edge);
2447		      edge_info->lhs = op0;
2448		      edge_info->rhs = (integer_zerop (op1)
2449					    ? boolean_true_node
2450					    : boolean_false_node);
2451		    }
2452		  else
2453		    {
2454		      edge_info = allocate_edge_info (true_edge);
2455		      edge_info->lhs = op0;
2456		      edge_info->rhs = (integer_zerop (op1)
2457					    ? boolean_true_node
2458					    : boolean_false_node);
2459
2460		      edge_info = allocate_edge_info (false_edge);
2461		      edge_info->lhs = op0;
2462		      edge_info->rhs = (integer_zerop (op1)
2463					    ? boolean_false_node
2464					    : boolean_true_node);
2465		    }
2466		}
2467
2468	      else if (is_gimple_min_invariant (op0)
2469		       && (TREE_CODE (op1) == SSA_NAME
2470			   || is_gimple_min_invariant (op1)))
2471		{
2472		  tree inverted = invert_truthvalue (cond);
2473		  struct edge_info *edge_info;
2474
2475		  edge_info = allocate_edge_info (true_edge);
2476		  record_conditions (edge_info, cond, inverted);
2477
2478		  if (TREE_CODE (cond) == EQ_EXPR)
2479		    {
2480		      edge_info->lhs = op1;
2481		      edge_info->rhs = op0;
2482		    }
2483
2484		  edge_info = allocate_edge_info (false_edge);
2485		  record_conditions (edge_info, inverted, cond);
2486
2487		  if (TREE_CODE (cond) == NE_EXPR)
2488		    {
2489		      edge_info->lhs = op1;
2490		      edge_info->rhs = op0;
2491		    }
2492		}
2493
2494	      else if (TREE_CODE (op0) == SSA_NAME
2495		       && (is_gimple_min_invariant (op1)
2496			   || TREE_CODE (op1) == SSA_NAME))
2497		{
2498		  tree inverted = invert_truthvalue (cond);
2499		  struct edge_info *edge_info;
2500
2501		  edge_info = allocate_edge_info (true_edge);
2502		  record_conditions (edge_info, cond, inverted);
2503
2504		  if (TREE_CODE (cond) == EQ_EXPR)
2505		    {
2506		      edge_info->lhs = op0;
2507		      edge_info->rhs = op1;
2508		    }
2509
2510		  edge_info = allocate_edge_info (false_edge);
2511		  record_conditions (edge_info, inverted, cond);
2512
2513		  if (TREE_CODE (cond) == NE_EXPR)
2514		    {
2515		      edge_info->lhs = op0;
2516		      edge_info->rhs = op1;
2517		    }
2518		}
2519	    }
2520
2521	  /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
2522	}
2523    }
2524}
2525
2526/* Propagate information from BB to its outgoing edges.
2527
2528   This can include equivalency information implied by control statements
2529   at the end of BB and const/copy propagation into PHIs in BB's
2530   successor blocks.  */
2531
2532static void
2533propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2534			     basic_block bb)
2535{
2536  record_edge_info (bb);
2537  cprop_into_successor_phis (bb, nonzero_vars);
2538}
2539
2540/* Search for redundant computations in STMT.  If any are found, then
2541   replace them with the variable holding the result of the computation.
2542
2543   If safe, record this expression into the available expression hash
2544   table.  */
2545
2546static bool
2547eliminate_redundant_computations (tree stmt, stmt_ann_t ann)
2548{
2549  tree *expr_p, def = NULL_TREE;
2550  bool insert = true;
2551  tree cached_lhs;
2552  bool retval = false;
2553  bool modify_expr_p = false;
2554
2555  if (TREE_CODE (stmt) == MODIFY_EXPR)
2556    def = TREE_OPERAND (stmt, 0);
2557
2558  /* Certain expressions on the RHS can be optimized away, but can not
2559     themselves be entered into the hash tables.  */
2560  if (ann->makes_aliased_stores
2561      || ! def
2562      || TREE_CODE (def) != SSA_NAME
2563      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
2564      || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
2565      /* Do not record equivalences for increments of ivs.  This would create
2566	 overlapping live ranges for a very questionable gain.  */
2567      || simple_iv_increment_p (stmt))
2568    insert = false;
2569
2570  /* Check if the expression has been computed before.  */
2571  cached_lhs = lookup_avail_expr (stmt, insert);
2572
2573  /* If this is an assignment and the RHS was not in the hash table,
2574     then try to simplify the RHS and lookup the new RHS in the
2575     hash table.  */
2576  if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
2577    cached_lhs = simplify_rhs_and_lookup_avail_expr (stmt, insert);
2578  /* Similarly if this is a COND_EXPR and we did not find its
2579     expression in the hash table, simplify the condition and
2580     try again.  */
2581  else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
2582    cached_lhs = simplify_cond_and_lookup_avail_expr (stmt, ann, insert);
2583  /* Similarly for a SWITCH_EXPR.  */
2584  else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
2585    cached_lhs = simplify_switch_and_lookup_avail_expr (stmt, insert);
2586
2587  opt_stats.num_exprs_considered++;
2588
2589  /* Get a pointer to the expression we are trying to optimize.  */
2590  if (TREE_CODE (stmt) == COND_EXPR)
2591    expr_p = &COND_EXPR_COND (stmt);
2592  else if (TREE_CODE (stmt) == SWITCH_EXPR)
2593    expr_p = &SWITCH_COND (stmt);
2594  else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
2595    {
2596      expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2597      modify_expr_p = true;
2598    }
2599  else
2600    {
2601      expr_p = &TREE_OPERAND (stmt, 1);
2602      modify_expr_p = true;
2603    }
2604
2605  /* It is safe to ignore types here since we have already done
2606     type checking in the hashing and equality routines.  In fact
2607     type checking here merely gets in the way of constant
2608     propagation.  Also, make sure that it is safe to propagate
2609     CACHED_LHS into *EXPR_P.  */
2610  if (cached_lhs
2611      && ((TREE_CODE (cached_lhs) != SSA_NAME
2612	   && (modify_expr_p
2613	       || tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
2614						      TREE_TYPE (cached_lhs))))
2615	  || may_propagate_copy (*expr_p, cached_lhs)))
2616    {
2617      if (dump_file && (dump_flags & TDF_DETAILS))
2618	{
2619	  fprintf (dump_file, "  Replaced redundant expr '");
2620	  print_generic_expr (dump_file, *expr_p, dump_flags);
2621	  fprintf (dump_file, "' with '");
2622	  print_generic_expr (dump_file, cached_lhs, dump_flags);
2623	   fprintf (dump_file, "'\n");
2624	}
2625
2626      opt_stats.num_re++;
2627
2628#if defined ENABLE_CHECKING
2629      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
2630		  || is_gimple_min_invariant (cached_lhs));
2631#endif
2632
2633      if (TREE_CODE (cached_lhs) == ADDR_EXPR
2634	  || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
2635	      && is_gimple_min_invariant (cached_lhs)))
2636	retval = true;
2637
2638      if (modify_expr_p
2639	  && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
2640						  TREE_TYPE (cached_lhs)))
2641	cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);
2642
2643      propagate_tree_value (expr_p, cached_lhs);
2644      mark_stmt_modified (stmt);
2645    }
2646  return retval;
2647}
2648
2649/* STMT, a MODIFY_EXPR, may create certain equivalences, in either
2650   the available expressions table or the const_and_copies table.
2651   Detect and record those equivalences.  */
2652
2653static void
2654record_equivalences_from_stmt (tree stmt,
2655			       int may_optimize_p,
2656			       stmt_ann_t ann)
2657{
2658  tree lhs = TREE_OPERAND (stmt, 0);
2659  enum tree_code lhs_code = TREE_CODE (lhs);
2660  int i;
2661
2662  if (lhs_code == SSA_NAME)
2663    {
2664      tree rhs = TREE_OPERAND (stmt, 1);
2665
2666      /* Strip away any useless type conversions.  */
2667      STRIP_USELESS_TYPE_CONVERSION (rhs);
2668
2669      /* If the RHS of the assignment is a constant or another variable that
2670	 may be propagated, register it in the CONST_AND_COPIES table.  We
2671	 do not need to record unwind data for this, since this is a true
2672	 assignment and not an equivalence inferred from a comparison.  All
2673	 uses of this ssa name are dominated by this assignment, so unwinding
2674	 just costs time and space.  */
2675      if (may_optimize_p
2676	  && (TREE_CODE (rhs) == SSA_NAME
2677	      || is_gimple_min_invariant (rhs)))
2678	SSA_NAME_VALUE (lhs) = rhs;
2679
2680      if (tree_expr_nonzero_p (rhs))
2681	record_var_is_nonzero (lhs);
2682    }
2683
2684  /* Look at both sides for pointer dereferences.  If we find one, then
2685     the pointer must be nonnull and we can enter that equivalence into
2686     the hash tables.  */
2687  if (flag_delete_null_pointer_checks)
2688    for (i = 0; i < 2; i++)
2689      {
2690	tree t = TREE_OPERAND (stmt, i);
2691
2692	/* Strip away any COMPONENT_REFs.  */
2693	while (TREE_CODE (t) == COMPONENT_REF)
2694	  t = TREE_OPERAND (t, 0);
2695
2696	/* Now see if this is a pointer dereference.  */
2697	if (INDIRECT_REF_P (t))
2698          {
2699	    tree op = TREE_OPERAND (t, 0);
2700
2701	    /* If the pointer is a SSA variable, then enter new
2702	       equivalences into the hash table.  */
2703	    while (TREE_CODE (op) == SSA_NAME)
2704	      {
2705		tree def = SSA_NAME_DEF_STMT (op);
2706
2707		record_var_is_nonzero (op);
2708
2709		/* And walk up the USE-DEF chains noting other SSA_NAMEs
2710		   which are known to have a nonzero value.  */
2711		if (def
2712		    && TREE_CODE (def) == MODIFY_EXPR
2713		    && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
2714		  op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
2715		else
2716		  break;
2717	      }
2718	  }
2719      }
2720
2721  /* A memory store, even an aliased store, creates a useful
2722     equivalence.  By exchanging the LHS and RHS, creating suitable
2723     vops and recording the result in the available expression table,
2724     we may be able to expose more redundant loads.  */
2725  if (!ann->has_volatile_ops
2726      && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
2727	  || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
2728      && !is_gimple_reg (lhs))
2729    {
2730      tree rhs = TREE_OPERAND (stmt, 1);
2731      tree new;
2732
2733      /* FIXME: If the LHS of the assignment is a bitfield and the RHS
2734         is a constant, we need to adjust the constant to fit into the
2735         type of the LHS.  If the LHS is a bitfield and the RHS is not
2736	 a constant, then we can not record any equivalences for this
2737	 statement since we would need to represent the widening or
2738	 narrowing of RHS.  This fixes gcc.c-torture/execute/921016-1.c
2739	 and should not be necessary if GCC represented bitfields
2740	 properly.  */
2741      if (lhs_code == COMPONENT_REF
2742	  && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
2743	{
2744	  if (TREE_CONSTANT (rhs))
2745	    rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
2746	  else
2747	    rhs = NULL;
2748
2749	  /* If the value overflowed, then we can not use this equivalence.  */
2750	  if (rhs && ! is_gimple_min_invariant (rhs))
2751	    rhs = NULL;
2752	}
2753
2754      if (rhs)
2755	{
2756	  /* Build a new statement with the RHS and LHS exchanged.  */
2757	  new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
2758
2759	  create_ssa_artficial_load_stmt (new, stmt);
2760
2761	  /* Finally enter the statement into the available expression
2762	     table.  */
2763	  lookup_avail_expr (new, true);
2764	}
2765    }
2766}
2767
2768/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2769   CONST_AND_COPIES.  */
2770
2771static bool
2772cprop_operand (tree stmt, use_operand_p op_p)
2773{
2774  bool may_have_exposed_new_symbols = false;
2775  tree val;
2776  tree op = USE_FROM_PTR (op_p);
2777
2778  /* If the operand has a known constant value or it is known to be a
2779     copy of some other variable, use the value or copy stored in
2780     CONST_AND_COPIES.  */
2781  val = SSA_NAME_VALUE (op);
2782  if (val && val != op && TREE_CODE (val) != VALUE_HANDLE)
2783    {
2784      tree op_type, val_type;
2785
2786      /* Do not change the base variable in the virtual operand
2787	 tables.  That would make it impossible to reconstruct
2788	 the renamed virtual operand if we later modify this
2789	 statement.  Also only allow the new value to be an SSA_NAME
2790	 for propagation into virtual operands.  */
2791      if (!is_gimple_reg (op)
2792	  && (TREE_CODE (val) != SSA_NAME
2793	      || is_gimple_reg (val)
2794	      || get_virtual_var (val) != get_virtual_var (op)))
2795	return false;
2796
2797      /* Do not replace hard register operands in asm statements.  */
2798      if (TREE_CODE (stmt) == ASM_EXPR
2799	  && !may_propagate_copy_into_asm (op))
2800	return false;
2801
2802      /* Get the toplevel type of each operand.  */
2803      op_type = TREE_TYPE (op);
2804      val_type = TREE_TYPE (val);
2805
2806      /* While both types are pointers, get the type of the object
2807	 pointed to.  */
2808      while (POINTER_TYPE_P (op_type) && POINTER_TYPE_P (val_type))
2809	{
2810	  op_type = TREE_TYPE (op_type);
2811	  val_type = TREE_TYPE (val_type);
2812	}
2813
2814      /* Make sure underlying types match before propagating a constant by
2815	 converting the constant to the proper type.  Note that convert may
2816	 return a non-gimple expression, in which case we ignore this
2817	 propagation opportunity.  */
2818      if (TREE_CODE (val) != SSA_NAME)
2819	{
2820	  if (!lang_hooks.types_compatible_p (op_type, val_type))
2821	    {
2822	      val = fold_convert (TREE_TYPE (op), val);
2823	      if (!is_gimple_min_invariant (val))
2824		return false;
2825	    }
2826	}
2827
2828      /* Certain operands are not allowed to be copy propagated due
2829	 to their interaction with exception handling and some GCC
2830	 extensions.  */
2831      else if (!may_propagate_copy (op, val))
2832	return false;
2833
2834      /* Do not propagate copies if the propagated value is at a deeper loop
2835	 depth than the propagatee.  Otherwise, this may move loop variant
2836	 variables outside of their loops and prevent coalescing
2837	 opportunities.  If the value was loop invariant, it will be hoisted
2838	 by LICM and exposed for copy propagation.  */
2839      if (loop_depth_of_name (val) > loop_depth_of_name (op))
2840	return false;
2841
2842      /* Dump details.  */
2843      if (dump_file && (dump_flags & TDF_DETAILS))
2844	{
2845	  fprintf (dump_file, "  Replaced '");
2846	  print_generic_expr (dump_file, op, dump_flags);
2847	  fprintf (dump_file, "' with %s '",
2848		   (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2849	  print_generic_expr (dump_file, val, dump_flags);
2850	  fprintf (dump_file, "'\n");
2851	}
2852
2853      /* If VAL is an ADDR_EXPR or a constant of pointer type, note
2854	 that we may have exposed a new symbol for SSA renaming.  */
2855      if (TREE_CODE (val) == ADDR_EXPR
2856	  || (POINTER_TYPE_P (TREE_TYPE (op))
2857	      && is_gimple_min_invariant (val)))
2858	may_have_exposed_new_symbols = true;
2859
2860      if (TREE_CODE (val) != SSA_NAME)
2861	opt_stats.num_const_prop++;
2862      else
2863	opt_stats.num_copy_prop++;
2864
2865      propagate_value (op_p, val);
2866
2867      /* And note that we modified this statement.  This is now
2868	 safe, even if we changed virtual operands since we will
2869	 rescan the statement and rewrite its operands again.  */
2870      mark_stmt_modified (stmt);
2871    }
2872  return may_have_exposed_new_symbols;
2873}
2874
2875/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
2876   known value for that SSA_NAME (or NULL if no value is known).
2877
2878   Propagate values from CONST_AND_COPIES into the uses, vuses and
2879   v_may_def_ops of STMT.  */
2880
2881static bool
2882cprop_into_stmt (tree stmt)
2883{
2884  bool may_have_exposed_new_symbols = false;
2885  use_operand_p op_p;
2886  ssa_op_iter iter;
2887
2888  FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
2889    {
2890      if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
2891	may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
2892    }
2893
2894  return may_have_exposed_new_symbols;
2895}
2896
2897
2898/* Optimize the statement pointed to by iterator SI.
2899
2900   We try to perform some simplistic global redundancy elimination and
2901   constant propagation:
2902
2903   1- To detect global redundancy, we keep track of expressions that have
2904      been computed in this block and its dominators.  If we find that the
2905      same expression is computed more than once, we eliminate repeated
2906      computations by using the target of the first one.
2907
2908   2- Constant values and copy assignments.  This is used to do very
2909      simplistic constant and copy propagation.  When a constant or copy
2910      assignment is found, we map the value on the RHS of the assignment to
2911      the variable in the LHS in the CONST_AND_COPIES table.  */
2912
2913static void
2914optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
2915	       basic_block bb, block_stmt_iterator si)
2916{
2917  stmt_ann_t ann;
2918  tree stmt, old_stmt;
2919  bool may_optimize_p;
2920  bool may_have_exposed_new_symbols = false;
2921
2922  old_stmt = stmt = bsi_stmt (si);
2923
2924  update_stmt_if_modified (stmt);
2925  ann = stmt_ann (stmt);
2926  opt_stats.num_stmts++;
2927  may_have_exposed_new_symbols = false;
2928
2929  if (dump_file && (dump_flags & TDF_DETAILS))
2930    {
2931      fprintf (dump_file, "Optimizing statement ");
2932      print_generic_stmt (dump_file, stmt, TDF_SLIM);
2933    }
2934
2935  /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs.  */
2936  may_have_exposed_new_symbols = cprop_into_stmt (stmt);
2937
2938  /* If the statement has been modified with constant replacements,
2939     fold its RHS before checking for redundant computations.  */
2940  if (ann->modified)
2941    {
2942      tree rhs;
2943
2944      /* Try to fold the statement making sure that STMT is kept
2945	 up to date.  */
2946      if (fold_stmt (bsi_stmt_ptr (si)))
2947	{
2948	  stmt = bsi_stmt (si);
2949	  ann = stmt_ann (stmt);
2950
2951	  if (dump_file && (dump_flags & TDF_DETAILS))
2952	    {
2953	      fprintf (dump_file, "  Folded to: ");
2954	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
2955	    }
2956	}
2957
2958      rhs = get_rhs (stmt);
2959      if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
2960	recompute_tree_invarant_for_addr_expr (rhs);
2961
2962      /* Constant/copy propagation above may change the set of
2963	 virtual operands associated with this statement.  Folding
2964	 may remove the need for some virtual operands.
2965
2966	 Indicate we will need to rescan and rewrite the statement.  */
2967      may_have_exposed_new_symbols = true;
2968    }
2969
2970  /* Check for redundant computations.  Do this optimization only
2971     for assignments that have no volatile ops and conditionals.  */
2972  may_optimize_p = (!ann->has_volatile_ops
2973		    && ((TREE_CODE (stmt) == RETURN_EXPR
2974			 && TREE_OPERAND (stmt, 0)
2975			 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
2976			 && ! (TREE_SIDE_EFFECTS
2977			       (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
2978			|| (TREE_CODE (stmt) == MODIFY_EXPR
2979			    && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
2980			|| TREE_CODE (stmt) == COND_EXPR
2981			|| TREE_CODE (stmt) == SWITCH_EXPR));
2982
2983  if (may_optimize_p)
2984    may_have_exposed_new_symbols
2985      |= eliminate_redundant_computations (stmt, ann);
2986
2987  /* Record any additional equivalences created by this statement.  */
2988  if (TREE_CODE (stmt) == MODIFY_EXPR)
2989    record_equivalences_from_stmt (stmt,
2990				   may_optimize_p,
2991				   ann);
2992
2993  /* If STMT is a COND_EXPR and it was modified, then we may know
2994     where it goes.  If that is the case, then mark the CFG as altered.
2995
2996     This will cause us to later call remove_unreachable_blocks and
2997     cleanup_tree_cfg when it is safe to do so.  It is not safe to
2998     clean things up here since removal of edges and such can trigger
2999     the removal of PHI nodes, which in turn can release SSA_NAMEs to
3000     the manager.
3001
3002     That's all fine and good, except that once SSA_NAMEs are released
3003     to the manager, we must not call create_ssa_name until all references
3004     to released SSA_NAMEs have been eliminated.
3005
3006     All references to the deleted SSA_NAMEs can not be eliminated until
3007     we remove unreachable blocks.
3008
3009     We can not remove unreachable blocks until after we have completed
3010     any queued jump threading.
3011
3012     We can not complete any queued jump threads until we have taken
3013     appropriate variables out of SSA form.  Taking variables out of
3014     SSA form can call create_ssa_name and thus we lose.
3015
3016     Ultimately I suspect we're going to need to change the interface
3017     into the SSA_NAME manager.  */
3018
3019  if (ann->modified)
3020    {
3021      tree val = NULL;
3022
3023      if (TREE_CODE (stmt) == COND_EXPR)
3024	val = COND_EXPR_COND (stmt);
3025      else if (TREE_CODE (stmt) == SWITCH_EXPR)
3026	val = SWITCH_COND (stmt);
3027
3028      if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
3029	cfg_altered = true;
3030
3031      /* If we simplified a statement in such a way as to be shown that it
3032	 cannot trap, update the eh information and the cfg to match.  */
3033      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
3034	{
3035	  bitmap_set_bit (need_eh_cleanup, bb->index);
3036	  if (dump_file && (dump_flags & TDF_DETAILS))
3037	    fprintf (dump_file, "  Flagged to clear EH edges.\n");
3038	}
3039    }
3040
3041  if (may_have_exposed_new_symbols)
3042    VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
3043}
3044
3045/* Replace the RHS of STMT with NEW_RHS.  If RHS can be found in the
3046   available expression hashtable, then return the LHS from the hash
3047   table.
3048
3049   If INSERT is true, then we also update the available expression
3050   hash table to account for the changes made to STMT.  */
3051
3052static tree
3053update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
3054{
3055  tree cached_lhs = NULL;
3056
3057  /* Remove the old entry from the hash table.  */
3058  if (insert)
3059    {
3060      struct expr_hash_elt element;
3061
3062      initialize_hash_element (stmt, NULL, &element);
3063      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
3064    }
3065
3066  /* Now update the RHS of the assignment.  */
3067  TREE_OPERAND (stmt, 1) = new_rhs;
3068
3069  /* Now lookup the updated statement in the hash table.  */
3070  cached_lhs = lookup_avail_expr (stmt, insert);
3071
3072  /* We have now called lookup_avail_expr twice with two different
3073     versions of this same statement, once in optimize_stmt, once here.
3074
3075     We know the call in optimize_stmt did not find an existing entry
3076     in the hash table, so a new entry was created.  At the same time
3077     this statement was pushed onto the AVAIL_EXPRS_STACK vector.
3078
3079     If this call failed to find an existing entry on the hash table,
3080     then the new version of this statement was entered into the
3081     hash table.  And this statement was pushed onto BLOCK_AVAIL_EXPR
3082     for the second time.  So there are two copies on BLOCK_AVAIL_EXPRs
3083
3084     If this call succeeded, we still have one copy of this statement
3085     on the BLOCK_AVAIL_EXPRs vector.
3086
3087     For both cases, we need to pop the most recent entry off the
3088     BLOCK_AVAIL_EXPRs vector.  For the case where we never found this
3089     statement in the hash tables, that will leave precisely one
3090     copy of this statement on BLOCK_AVAIL_EXPRs.  For the case where
3091     we found a copy of this statement in the second hash table lookup
3092     we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs.  */
3093  if (insert)
3094    VEC_pop (tree, avail_exprs_stack);
3095
3096  /* And make sure we record the fact that we modified this
3097     statement.  */
3098  mark_stmt_modified (stmt);
3099
3100  return cached_lhs;
3101}
3102
3103/* Search for an existing instance of STMT in the AVAIL_EXPRS table.  If
3104   found, return its LHS. Otherwise insert STMT in the table and return
3105   NULL_TREE.
3106
3107   Also, when an expression is first inserted in the AVAIL_EXPRS table, it
3108   is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that they
3109   can be removed when we finish processing this block and its children.
3110
3111   NOTE: This function assumes that STMT is a MODIFY_EXPR node that
3112   contains no CALL_EXPR on its RHS and makes no volatile nor
3113   aliased references.  */
3114
3115static tree
3116lookup_avail_expr (tree stmt, bool insert)
3117{
3118  void **slot;
3119  tree lhs;
3120  tree temp;
3121  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
3122
3123  lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
3124
3125  initialize_hash_element (stmt, lhs, element);
3126
3127  /* Don't bother remembering constant assignments and copy operations.
3128     Constants and copy operations are handled by the constant/copy propagator
3129     in optimize_stmt.  */
3130  if (TREE_CODE (element->rhs) == SSA_NAME
3131      || is_gimple_min_invariant (element->rhs))
3132    {
3133      free (element);
3134      return NULL_TREE;
3135    }
3136
3137  /* If this is an equality test against zero, see if we have recorded a
3138     nonzero value for the variable in question.  */
3139  if ((TREE_CODE (element->rhs) == EQ_EXPR
3140       || TREE_CODE  (element->rhs) == NE_EXPR)
3141      && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME
3142      && integer_zerop (TREE_OPERAND (element->rhs, 1)))
3143    {
3144      int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0));
3145
3146      if (bitmap_bit_p (nonzero_vars, indx))
3147	{
3148	  tree t = element->rhs;
3149	  free (element);
3150	  return constant_boolean_node (TREE_CODE (t) != EQ_EXPR,
3151					TREE_TYPE (t));
3152	}
3153    }
3154
3155  /* Finally try to find the expression in the main expression hash table.  */
3156  slot = htab_find_slot_with_hash (avail_exprs, element, element->hash,
3157				   (insert ? INSERT : NO_INSERT));
3158  if (slot == NULL)
3159    {
3160      free (element);
3161      return NULL_TREE;
3162    }
3163
3164  if (*slot == NULL)
3165    {
3166      *slot = (void *) element;
3167      VEC_safe_push (tree, heap, avail_exprs_stack,
3168		     stmt ? stmt : element->rhs);
3169      return NULL_TREE;
3170    }
3171
3172  /* Extract the LHS of the assignment so that it can be used as the current
3173     definition of another variable.  */
3174  lhs = ((struct expr_hash_elt *)*slot)->lhs;
3175
3176  /* See if the LHS appears in the CONST_AND_COPIES table.  If it does, then
3177     use the value from the const_and_copies table.  */
3178  if (TREE_CODE (lhs) == SSA_NAME)
3179    {
3180      temp = SSA_NAME_VALUE (lhs);
3181      if (temp && TREE_CODE (temp) != VALUE_HANDLE)
3182	lhs = temp;
3183    }
3184
3185  free (element);
3186  return lhs;
3187}
3188
3189/* Given a condition COND, record into HI_P, LO_P and INVERTED_P the
3190   range of values that result in the conditional having a true value.
3191
3192   Return true if we are successful in extracting a range from COND and
3193   false if we are unsuccessful.  */
3194
3195static bool
3196extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
3197{
3198  tree op1 = TREE_OPERAND (cond, 1);
3199  tree high, low, type;
3200  int inverted;
3201
3202  type = TREE_TYPE (op1);
3203
3204  /* Experiments have shown that it's rarely, if ever useful to
3205     record ranges for enumerations.  Presumably this is due to
3206     the fact that they're rarely used directly.  They are typically
3207     cast into an integer type and used that way.  */
3208  if (TREE_CODE (type) != INTEGER_TYPE)
3209    return 0;
3210
3211  switch (TREE_CODE (cond))
3212    {
3213    case EQ_EXPR:
3214      high = low = op1;
3215      inverted = 0;
3216      break;
3217
3218    case NE_EXPR:
3219      high = low = op1;
3220      inverted = 1;
3221      break;
3222
3223    case GE_EXPR:
3224      low = op1;
3225
3226      /* Get the highest value of the type.  If not a constant, use that
3227	 of its base type, if it has one.  */
3228      high = TYPE_MAX_VALUE (type);
3229      if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
3230	high = TYPE_MAX_VALUE (TREE_TYPE (type));
3231      inverted = 0;
3232      break;
3233
3234    case GT_EXPR:
3235      high = TYPE_MAX_VALUE (type);
3236      if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
3237	high = TYPE_MAX_VALUE (TREE_TYPE (type));
3238      if (!tree_int_cst_lt (op1, high))
3239	return 0;
3240      low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
3241      inverted = 0;
3242      break;
3243
3244    case LE_EXPR:
3245      high = op1;
3246      low = TYPE_MIN_VALUE (type);
3247      if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
3248	low = TYPE_MIN_VALUE (TREE_TYPE (type));
3249      inverted = 0;
3250      break;
3251
3252    case LT_EXPR:
3253      low = TYPE_MIN_VALUE (type);
3254      if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
3255	low = TYPE_MIN_VALUE (TREE_TYPE (type));
3256      if (!tree_int_cst_lt (low, op1))
3257	return 0;
3258      high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
3259      inverted = 0;
3260      break;
3261
3262    default:
3263      return 0;
3264    }
3265
3266  *hi_p = high;
3267  *lo_p = low;
3268  *inverted_p = inverted;
3269  return 1;
3270}
3271
3272/* Record a range created by COND for basic block BB.  */
3273
3274static void
3275record_range (tree cond, basic_block bb)
3276{
3277  enum tree_code code = TREE_CODE (cond);
3278
3279  /* We explicitly ignore NE_EXPRs and all the unordered comparisons.
3280     They rarely allow for meaningful range optimizations and significantly
3281     complicate the implementation.  */
3282  if ((code == LT_EXPR || code == LE_EXPR || code == GT_EXPR
3283       || code == GE_EXPR || code == EQ_EXPR)
3284      && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
3285    {
3286      struct vrp_hash_elt *vrp_hash_elt;
3287      struct vrp_element *element;
3288      VEC(vrp_element_p,heap) **vrp_records_p;
3289      void **slot;
3290
3291
3292      vrp_hash_elt = xmalloc (sizeof (struct vrp_hash_elt));
3293      vrp_hash_elt->var = TREE_OPERAND (cond, 0);
3294      vrp_hash_elt->records = NULL;
3295      slot = htab_find_slot (vrp_data, vrp_hash_elt, INSERT);
3296
3297      if (*slot == NULL)
3298	*slot = (void *) vrp_hash_elt;
3299      else
3300	vrp_free (vrp_hash_elt);
3301
3302      vrp_hash_elt = (struct vrp_hash_elt *) *slot;
3303      vrp_records_p = &vrp_hash_elt->records;
3304
3305      element = ggc_alloc (sizeof (struct vrp_element));
3306      element->low = NULL;
3307      element->high = NULL;
3308      element->cond = cond;
3309      element->bb = bb;
3310
3311      VEC_safe_push (vrp_element_p, heap, *vrp_records_p, element);
3312      VEC_safe_push (tree, heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
3313    }
3314}
3315
3316/* Hashing and equality functions for VRP_DATA.
3317
3318   Since this hash table is addressed by SSA_NAMEs, we can hash on
3319   their version number and equality can be determined with a
3320   pointer comparison.  */
3321
3322static hashval_t
3323vrp_hash (const void *p)
3324{
3325  tree var = ((struct vrp_hash_elt *)p)->var;
3326
3327  return SSA_NAME_VERSION (var);
3328}
3329
3330static int
3331vrp_eq (const void *p1, const void *p2)
3332{
3333  tree var1 = ((struct vrp_hash_elt *)p1)->var;
3334  tree var2 = ((struct vrp_hash_elt *)p2)->var;
3335
3336  return var1 == var2;
3337}
3338
3339/* Hashing and equality functions for AVAIL_EXPRS.  The table stores
3340   MODIFY_EXPR statements.  We compute a value number for expressions using
3341   the code of the expression and the SSA numbers of its operands.  */
3342
3343static hashval_t
3344avail_expr_hash (const void *p)
3345{
3346  tree stmt = ((struct expr_hash_elt *)p)->stmt;
3347  tree rhs = ((struct expr_hash_elt *)p)->rhs;
3348  tree vuse;
3349  ssa_op_iter iter;
3350  hashval_t val = 0;
3351
3352  /* iterative_hash_expr knows how to deal with any expression and
3353     deals with commutative operators as well, so just use it instead
3354     of duplicating such complexities here.  */
3355  val = iterative_hash_expr (rhs, val);
3356
3357  /* If the hash table entry is not associated with a statement, then we
3358     can just hash the expression and not worry about virtual operands
3359     and such.  */
3360  if (!stmt || !stmt_ann (stmt))
3361    return val;
3362
3363  /* Add the SSA version numbers of every vuse operand.  This is important
3364     because compound variables like arrays are not renamed in the
3365     operands.  Rather, the rename is done on the virtual variable
3366     representing all the elements of the array.  */
3367  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
3368    val = iterative_hash_expr (vuse, val);
3369
3370  return val;
3371}
3372
3373static hashval_t
3374real_avail_expr_hash (const void *p)
3375{
3376  return ((const struct expr_hash_elt *)p)->hash;
3377}
3378
3379static int
3380avail_expr_eq (const void *p1, const void *p2)
3381{
3382  tree stmt1 = ((struct expr_hash_elt *)p1)->stmt;
3383  tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
3384  tree stmt2 = ((struct expr_hash_elt *)p2)->stmt;
3385  tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
3386
3387  /* If they are the same physical expression, return true.  */
3388  if (rhs1 == rhs2 && stmt1 == stmt2)
3389    return true;
3390
3391  /* If their codes are not equal, then quit now.  */
3392  if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
3393    return false;
3394
3395  /* In case of a collision, both RHS have to be identical and have the
3396     same VUSE operands.  */
3397  if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2)
3398       || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
3399      && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
3400    {
3401      bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
3402      gcc_assert (!ret || ((struct expr_hash_elt *)p1)->hash
3403		  == ((struct expr_hash_elt *)p2)->hash);
3404      return ret;
3405    }
3406
3407  return false;
3408}
3409