/* Dead store elimination
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "params.h"
#include "alias.h"
#include "tree-ssa-loop.h"

/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second is redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
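
/* As a concrete illustration (a sketch, not taken from any particular
   testcase), consider:

	p->x = 10;	// VDEF <.MEM_2>
	...		// nothing reads p->x here
	p->x = 20;	// VDEF <.MEM_3>, VUSE <.MEM_2>

   The virtual definition .MEM_2 created by the first store has exactly
   one use, a later store to the same location which post dominates it,
   so the first store is dead and can be deleted.  */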


/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Return value from dse_classify_store.  */
enum dse_store_status
{
  DSE_STORE_LIVE,
  DSE_STORE_MAYBE_PARTIAL_DEAD,
  DSE_STORE_DEAD
};

/* STMT is a statement that may write into memory.  Analyze it and
   initialize WRITE to describe how STMT affects memory.

   Return TRUE if the statement was analyzed, FALSE otherwise.

   It is always safe to return FALSE.  But typically better optimization
   can be achieved by analyzing more statements.  */

static bool
initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write)
{
  /* It's advantageous to handle certain mem* functions.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	    {
	      tree size = NULL_TREE;
	      if (gimple_call_num_args (stmt) == 3)
		size = gimple_call_arg (stmt, 2);
	      tree ptr = gimple_call_arg (stmt, 0);
	      ao_ref_init_from_ptr_and_size (write, ptr, size);
	      return true;
	    }
	  default:
	    break;
	}
    }
  else if (is_gimple_assign (stmt))
    {
      ao_ref_init (write, gimple_assign_lhs (stmt));
      return true;
    }
  return false;
}

/* Given REF from the alias oracle, return TRUE if it is a valid
   memory reference for dead store elimination, FALSE otherwise.

   In particular, the reference must have a known base, known maximum
   size, start at a byte offset and have a size that is one or more
   bytes.  */

static bool
valid_ao_ref_for_dse (ao_ref *ref)
{
  return (ao_ref_base (ref)
	  && known_size_p (ref->max_size)
	  && maybe_ne (ref->size, 0)
	  && known_eq (ref->max_size, ref->size)
	  && known_ge (ref->offset, 0)
	  && multiple_p (ref->offset, BITS_PER_UNIT)
	  && multiple_p (ref->size, BITS_PER_UNIT));
}

/* Try to normalize COPY (an ao_ref) relative to REF.  Essentially when we are
   done COPY will only refer to bytes found within REF.  Return true if COPY
   is known to intersect at least one byte of REF.  */
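
/* As a worked example (offsets and sizes are stored in bits, but shown
   here in bytes for readability): if REF covers bytes [4, 12) of some
   object and COPY covers bytes [2, 8), normalization moves COPY's start
   up to byte 4 and shrinks its size accordingly, leaving COPY describing
   bytes [4, 8) -- exactly the overlap with REF.  */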

static bool
normalize_ref (ao_ref *copy, ao_ref *ref)
{
  if (!ordered_p (copy->offset, ref->offset))
    return false;

  /* If COPY starts before REF, then reset the beginning of
     COPY to match REF and decrease the size of COPY by the
     number of bytes removed from COPY.  */
  if (maybe_lt (copy->offset, ref->offset))
    {
      poly_int64 diff = ref->offset - copy->offset;
      if (maybe_le (copy->size, diff))
	return false;
      copy->size -= diff;
      copy->offset = ref->offset;
    }

  poly_int64 diff = copy->offset - ref->offset;
  if (maybe_le (ref->size, diff))
    return false;

  /* If COPY extends beyond REF, chop off its size appropriately.  */
  poly_int64 limit = ref->size - diff;
  if (!ordered_p (limit, copy->size))
    return false;

  if (maybe_gt (copy->size, limit))
    copy->size = limit;
  return true;
}

/* Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
   address written by STMT must match the one found in REF, which must
   have its base address previously initialized.

   This routine must be conservative.  If we don't know the offset or
   actual size written, assume nothing was written.  */

static void
clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
{
  ao_ref write;
  if (!initialize_ao_ref_for_dse (stmt, &write))
    return;

  /* Verify we have the same base memory address, the write
     has a known size and overlaps with REF.  */
  HOST_WIDE_INT start, size;
  if (valid_ao_ref_for_dse (&write)
      && operand_equal_p (write.base, ref->base, OEP_ADDRESS_OF)
      && known_eq (write.size, write.max_size)
      && normalize_ref (&write, ref)
      && (write.offset - ref->offset).is_constant (&start)
      && write.size.is_constant (&size))
    bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
			size / BITS_PER_UNIT);
}

/* REF is a memory write.  Extract relevant information from it and
   initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
   Otherwise return FALSE.  */

static bool
setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
{
  HOST_WIDE_INT const_size;
  if (valid_ao_ref_for_dse (ref)
      && ref->size.is_constant (&const_size)
      && (const_size / BITS_PER_UNIT
	  <= PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)))
    {
      bitmap_clear (live_bytes);
      bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
      return true;
    }
  return false;
}

/* Compute the number of elements that we can trim from the head and
   tail of ORIG resulting in a bitmap that is a superset of LIVE.

   Store the number of elements trimmed from the head and tail in
   TRIM_HEAD and TRIM_TAIL.

   STMT is the statement being trimmed and is used for debugging dump
   output only.  */
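
/* For instance (a hypothetical 16-byte store), if only bytes 4..11 are
   live, TRIM_HEAD comes out as 4 and TRIM_TAIL as 4.  When more than a
   word's worth of data remains live, TRIM_HEAD is additionally rounded
   down to a word boundary so the trimmed store stays word aligned.  */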

static void
compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
	       gimple *stmt)
{
  /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
     extends through ref->size.  So we know that in the original bitmap
     bits 0..ref->size were true.  We don't actually need the bitmap, just
     the REF to compute the trims.  */

  /* Now identify how much, if any, of the tail we can chop off.  */
  HOST_WIDE_INT const_size;
  int last_live = bitmap_last_set_bit (live);
  if (ref->size.is_constant (&const_size))
    {
      int last_orig = (const_size / BITS_PER_UNIT) - 1;
      /* We can leave inconvenient amounts on the tail as
	 residual handling in mem* and str* functions is usually
	 reasonably efficient.  */
      *trim_tail = last_orig - last_live;

      /* But don't trim away out of bounds accesses, as this defeats
	 proper warnings.

	 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
	 where TYPE_SIZE_UNIT is not a constant.  */
      if (*trim_tail
	  && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
	  && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
			       last_orig) <= 0)
	*trim_tail = 0;
    }
  else
    *trim_tail = 0;

  /* Identify how much, if any, of the head we can chop off.  */
  int first_orig = 0;
  int first_live = bitmap_first_set_bit (live);
  *trim_head = first_live - first_orig;

  /* If more than a word remains, then make sure to keep the
     starting point at least word aligned.  */
  if (last_live - first_live > UNITS_PER_WORD)
    *trim_head &= ~(UNITS_PER_WORD - 1);

  if ((*trim_head || *trim_tail)
      && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
	       *trim_head, *trim_tail);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }
}

/* STMT initializes an object from COMPLEX_CST where one or more of the
   bytes written may be dead stores.  REF is a representation of the
   memory written.  LIVE is the bitmap of bytes that are actually live.

   Attempt to rewrite STMT so that only the real or imaginary part of
   the object is actually stored.  */
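
/* For example (a sketch), for a store of a _Complex double constant
   where only the bytes of the real part are live, the statement can be
   narrowed to store just that part, conceptually:

	REALPART_EXPR <c> = 1.0e+0;  */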

static void
maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int trim_head, trim_tail;
  compute_trims (ref, live, &trim_head, &trim_tail, stmt);

  /* The amount of data trimmed from the head or tail must be at
     least half the size of the object to ensure we're trimming
     the entire real or imaginary half.  By writing things this
     way we avoid more O(n) bitmap operations.  */
  if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_REALPART is live.  */
      tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }
  else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_IMAGPART is live.  */
      tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }

  /* Other cases indicate parts of both the real and imag subobjects
     are live.  We do not try to optimize those cases.  */
}

/* STMT initializes an object using a CONSTRUCTOR where one or more of the
   bytes written are dead stores.  REF describes the memory written by
   STMT.  LIVE is the bitmap of bytes that are actually live.

   Attempt to rewrite STMT so that it writes only the bytes that are
   still live.

   The most common case for getting here is a CONSTRUCTOR with no elements
   being used to zero initialize an object.  We do not try to handle other
   cases as those would force us to fully cover the object with the
   CONSTRUCTOR node except for the components that are dead.  */
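
/* For example (a sketch), an empty-CONSTRUCTOR store of a 16-byte object

	s = {};

   whose first four bytes are overwritten before any use can be narrowed
   to clear only bytes 4..15, conceptually memset (&s + 4, 0, 12).  */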

static void
maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  tree ctor = gimple_assign_rhs1 (stmt);

  /* This is the only case we currently handle.  In practice it seems
     to catch most cases of actual interest.  */
  gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);

  int head_trim = 0;
  int tail_trim = 0;
  compute_trims (ref, live, &head_trim, &tail_trim, stmt);

  /* Now we want to replace the constructor initializer
     with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
  if (head_trim || tail_trim)
    {
      /* We want &lhs for the MEM_REF expression.  */
      tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));

      if (! is_gimple_min_invariant (lhs_addr))
	return;

      /* The number of bytes for the new constructor.  */
      poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
      poly_int64 count = ref_bytes - head_trim - tail_trim;

      /* And the new type for the CONSTRUCTOR.  Essentially it's just
	 a char array large enough to cover the non-trimmed parts of
	 the original CONSTRUCTOR.  Note we want explicit bounds here
	 so that we know how many bytes to clear when expanding the
	 CONSTRUCTOR.  */
      tree type = build_array_type_nelts (char_type_node, count);

      /* Build a suitable alias type rather than using alias set zero
	 to avoid pessimizing.  */
      tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));

      /* Build a MEM_REF representing the whole accessed area, starting
	 at the first byte not trimmed.  */
      tree exp = fold_build2 (MEM_REF, type, lhs_addr,
			      build_int_cst (alias_type, head_trim));

      /* Now update STMT with a new RHS and LHS.  */
      gimple_assign_set_lhs (stmt, exp);
      gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
    }
}

/* STMT is a memcpy, memmove or memset.  Decrement the number of bytes
   copied/set by DECREMENT.  */
static void
decrement_count (gimple *stmt, int decrement)
{
  tree *countp = gimple_call_arg_ptr (stmt, 2);
  gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
  *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
						    - decrement));
}

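/* STMT is a memcpy, memmove or memset.  Advance the start address
   pointed to by *WHERE (one of STMT's arguments) by INCREMENT bytes.
   For an SSA name this emits a new POINTER_PLUS_EXPR statement before
   STMT; for an invariant address it folds the adjustment into the
   address expression itself.  */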
static void
increment_start_addr (gimple *stmt, tree *where, int increment)
{
  if (TREE_CODE (*where) == SSA_NAME)
    {
      tree tem = make_ssa_name (TREE_TYPE (*where));
      gassign *newop
	= gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
			       build_int_cst (sizetype, increment));
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
      *where = tem;
      update_stmt (gsi_stmt (gsi));
      return;
    }

  *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
					      *where,
					      build_int_cst (ptr_type_node,
							     increment)));
}

/* STMT is a builtin call that writes bytes into the memory described by
   REF; bytes not marked in LIVE are dead and need not be stored.  Try
   to rewrite STMT to reduce the amount of data it actually writes.

   Right now we only support trimming from the head or the tail of the
   memory region.  In theory we could split the mem* call, but it's
   likely of marginal value.  */
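
/* For example (a sketch), if the first eight bytes written by

	memset (p, 0, 32);

   are overwritten before they can be read, the call can be rewritten
   as memset (p + 8, 0, 24) by bumping the destination address and
   shrinking the count.  */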

static void
maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
{
  switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
      {
	int head_trim, tail_trim;
	compute_trims (ref, live, &head_trim, &tail_trim, stmt);

	/* Tail trimming is easy, we can just reduce the count.  */
	if (tail_trim)
	  decrement_count (stmt, tail_trim);

	/* Head trimming requires adjusting all the arguments.  */
	if (head_trim)
	  {
	    tree *dst = gimple_call_arg_ptr (stmt, 0);
	    increment_start_addr (stmt, dst, head_trim);
	    tree *src = gimple_call_arg_ptr (stmt, 1);
	    increment_start_addr (stmt, src, head_trim);
	    decrement_count (stmt, head_trim);
	  }
	break;
      }

    case BUILT_IN_MEMSET:
      {
	int head_trim, tail_trim;
	compute_trims (ref, live, &head_trim, &tail_trim, stmt);

	/* Tail trimming is easy, we can just reduce the count.  */
	if (tail_trim)
	  decrement_count (stmt, tail_trim);

	/* Head trimming requires adjusting all the arguments.  */
	if (head_trim)
	  {
	    tree *dst = gimple_call_arg_ptr (stmt, 0);
	    increment_start_addr (stmt, dst, head_trim);
	    decrement_count (stmt, head_trim);
	  }
	break;
      }

    default:
      break;
    }
}

/* STMT is a memory write where one or more bytes written are dead
   stores.  REF describes the memory written by STMT.  LIVE is the
   bitmap of bytes that are actually live.

   Attempt to rewrite STMT so that it writes fewer memory locations.  Right
   now we only support trimming at the start or end of the memory region.
   It's not clear how much there is to be gained by trimming from the middle
   of the region.  */

static void
maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  if (is_gimple_assign (stmt)
      && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	case CONSTRUCTOR:
	  maybe_trim_constructor_store (ref, live, stmt);
	  break;
	case COMPLEX_CST:
	  maybe_trim_complex_store (ref, live, stmt);
	  break;
	default:
	  break;
	}
    }
}

/* Return TRUE if USE_REF reads bytes that are live according to LIVE,
   where LIVE was derived from REF, a write reference.

   While this routine may modify USE_REF, it's passed by value, not
   location.  So callers do not see those modifications.  */
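
/* For example (a sketch), if LIVE says only bytes 8..15 of REF are
   still live and USE_REF reads only bytes 0..3 of the same object,
   then no live byte is read and the use can be ignored when
   classifying the store.  */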

static bool
live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
{
  /* We have already verified that USE_REF and REF hit the same object.
     Now verify that there's actually an overlap between USE_REF and REF.  */
  HOST_WIDE_INT start, size;
  if (normalize_ref (&use_ref, ref)
      && (use_ref.offset - ref->offset).is_constant (&start)
      && use_ref.size.is_constant (&size))
    {
      /* If USE_REF covers all of REF, then it will hit one or more
	 live bytes.  This avoids useless iteration over the bitmap
	 below.  */
      if (start == 0 && known_eq (size, ref->size))
	return true;

      /* Now check if any of the remaining bits in USE_REF are set in LIVE.  */
      return bitmap_bit_in_range_p (live, start / BITS_PER_UNIT,
				    (start + size - 1) / BITS_PER_UNIT);
    }
  return true;
}

/* Callback for dse_classify_store calling for_each_index.  Verify that
   indices are invariant in the loop with backedge PHI in basic-block DATA.  */

static bool
check_name (tree, tree *idx, void *data)
{
  basic_block phi_bb = (basic_block) data;
  if (TREE_CODE (*idx) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (*idx)
      && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
			 phi_bb))
    return false;
  return true;
}

/* A helper of dse_optimize_stmt.
   Given a store in STMT (a GIMPLE_ASSIGN or a supported builtin call)
   that writes to REF, classify it according to downstream uses and
   defs.  Sets *BY_CLOBBER_P to true if only clobber statements
   influenced the classification result.  Returns the classification.  */
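
/* As a sketch of the walk: for

	*p = 0;
	if (cond) *p = 1; else *p = 2;

   both immediate uses of the first store's virtual definition are
   stores that kill the ref.  All defs are therefore removed and the
   first store classifies as DSE_STORE_DEAD.  */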

static dse_store_status
dse_classify_store (ao_ref *ref, gimple *stmt,
		    bool byte_tracking_enabled, sbitmap live_bytes,
		    bool *by_clobber_p = NULL)
{
  gimple *temp;
  int cnt = 0;
  auto_bitmap visited;

  if (by_clobber_p)
    *by_clobber_p = true;

  /* Find the first dominated statement that clobbers (part of) the
     memory STMT stores to, with no intermediate statement that may use
     part of that memory.  That is, find a store that may prove STMT
     to be a dead store.  */
  temp = stmt;
  do
    {
      gimple *use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      if (gimple_code (temp) == GIMPLE_PHI)
	{
	  /* If we visit this PHI by following a backedge then we have to
	     make sure ref->ref only refers to SSA names that are invariant
	     with respect to the loop represented by this PHI node.  */
	  if (dominated_by_p (CDI_DOMINATORS, gimple_bb (stmt),
			      gimple_bb (temp))
	      && !for_each_index (ref->ref ? &ref->ref : &ref->base,
				  check_name, gimple_bb (temp)))
	    return DSE_STORE_LIVE;
	  defvar = PHI_RESULT (temp);
	  bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
	}
      else
	defvar = gimple_vdef (temp);
      auto_vec<gimple *, 10> defs;
      gimple *phi_def = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  /* Limit stmt walking.  */
	  if (++cnt > PARAM_VALUE (PARAM_DSE_MAX_ALIAS_QUERIES_PER_STORE))
	    {
	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }

	  /* We have visited ourselves already, so ignore STMT for the
	     purpose of chaining.  */
	  if (use_stmt == stmt)
	    ;
	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  else if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      /* If we already visited this PHI ignore it for further
		 processing.  */
	      if (!bitmap_bit_p (visited,
				 SSA_NAME_VERSION (PHI_RESULT (use_stmt))))
		{
		  defs.safe_push (use_stmt);
		  phi_def = use_stmt;
		}
	    }
	  /* If the statement is a use the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
	    {
	      /* Handle common cases where we can easily build an ao_ref
		 structure for USE_STMT and in doing so we find that the
		 references hit non-live bytes and thus can be ignored.  */
	      if (byte_tracking_enabled
		  && is_gimple_assign (use_stmt))
		{
		  ao_ref use_ref;
		  ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
		  if (valid_ao_ref_for_dse (&use_ref)
		      && use_ref.base == ref->base
		      && known_eq (use_ref.size, use_ref.max_size)
		      && !live_bytes_read (use_ref, ref, live_bytes))
		    {
		      /* If this is a store, remember it as we possibly
			 need to walk the def's uses.  */
		      if (gimple_vdef (use_stmt))
			defs.safe_push (use_stmt);
		      continue;
		    }
		}

	      fail = true;
	      BREAK_FROM_IMM_USE_STMT (ui);
	    }
	  /* If this is a store, remember it as we possibly need to walk
	     the def's uses.  */
	  else if (gimple_vdef (use_stmt))
	    defs.safe_push (use_stmt);
	}

      if (fail)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}

      /* If we didn't find any definition this means the store is dead
	 if it isn't a store to globally reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (defs.is_empty ())
	{
	  if (ref_may_alias_global_p (ref))
	    return DSE_STORE_LIVE;

	  if (by_clobber_p)
	    *by_clobber_p = false;
	  return DSE_STORE_DEAD;
	}

      /* Process defs and remove those we need not process further.  */
      for (unsigned i = 0; i < defs.length ();)
	{
	  gimple *def = defs[i];
	  gimple *use_stmt;
	  use_operand_p use_p;
	  /* If the path to check starts with a kill we do not need to
	     process it further.
	     ???  With byte tracking we need only kill the bytes currently
	     live.  */
	  if (stmt_kills_ref_p (def, ref))
	    {
	      if (by_clobber_p && !gimple_clobber_p (def))
		*by_clobber_p = false;
	      defs.unordered_remove (i);
	    }
	  /* In addition to kills we can remove defs whose only use
	     is another def in defs.  That can only ever be a PHI, of
	     which we track just one for simplicity (we fail for
	     multiple PHIs anyway).  We can also ignore defs that feed
	     only into already visited PHIs.  */
	  else if (gimple_code (def) != GIMPLE_PHI
		   && single_imm_use (gimple_vdef (def), &use_p, &use_stmt)
		   && (use_stmt == phi_def
		       || (gimple_code (use_stmt) == GIMPLE_PHI
			   && bitmap_bit_p (visited,
					    SSA_NAME_VERSION
					      (PHI_RESULT (use_stmt))))))
	    defs.unordered_remove (i);
	  else
	    ++i;
	}

      /* If all defs kill the ref we are done.  */
      if (defs.is_empty ())
	return DSE_STORE_DEAD;
      /* If more than one def survives fail.  */
      if (defs.length () > 1)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}
      temp = defs[0];

      /* Track partial kills.  */
      if (byte_tracking_enabled)
	{
	  clear_bytes_written_by (live_bytes, temp, ref);
	  if (bitmap_empty_p (live_bytes))
	    {
	      if (by_clobber_p && !gimple_clobber_p (temp))
		*by_clobber_p = false;
	      return DSE_STORE_DEAD;
	    }
	}
    }
  /* Continue walking until there are no more live bytes.  */
  while (1);
}


class dse_dom_walker : public dom_walker
{
public:
  dse_dom_walker (cdi_direction direction)
    : dom_walker (direction),
      m_live_bytes (PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)),
      m_byte_tracking_enabled (false) {}

  virtual edge before_dom_children (basic_block);

private:
  auto_sbitmap m_live_bytes;
  bool m_byte_tracking_enabled;
  void dse_optimize_stmt (gimple_stmt_iterator *);
};

/* Delete a dead call at GSI, which is a mem* call of some kind.  */
static void
delete_dead_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted dead call: ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
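      /* The mem* builtins all return their destination argument, so a
	 used LHS can be satisfied by copying the pointer argument
	 directly rather than keeping the call around.  */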
      tree ptr = gimple_call_arg (stmt, 0);
      gimple *new_stmt = gimple_build_assign (lhs, ptr);
      unlink_stmt_vdef (stmt);
      if (gsi_replace (gsi, new_stmt, true))
	bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
    }
  else
    {
      /* Then we need to fix the operand of the consuming stmt.  */
      unlink_stmt_vdef (stmt);

      /* Remove the dead store.  */
      if (gsi_remove (gsi, true))
	bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
      release_defs (stmt);
    }
}

/* Delete a dead store at GSI, which is a gimple assignment.  */

static void
delete_dead_assignment (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted dead store: ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* Then we need to fix the operand of the consuming stmt.  */
  unlink_stmt_vdef (stmt);

  /* Remove the dead store.  */
  basic_block bb = gimple_bb (stmt);
  if (gsi_remove (gsi, true))
    bitmap_set_bit (need_eh_cleanup, bb->index);

  /* And release any SSA_NAMEs set in this statement back to the
     SSA_NAME manager.  */
  release_defs (stmt);
}

/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

void
dse_dom_walker::dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If this statement has no virtual defs, then there is nothing
     to do.  */
  if (!gimple_vdef (stmt))
    return;

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
	  || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  ao_ref ref;
  if (!initialize_ao_ref_for_dse (stmt, &ref))
    return;

  /* We know we have virtual definitions.  We can handle assignments and
     some builtin calls.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	    {
	      /* Occasionally calls with an explicit length of zero
		 show up in the IL.  It's pointless to do analysis
		 on them, they're trivially dead.  */
	      tree size = gimple_call_arg (stmt, 2);
	      if (integer_zerop (size))
		{
		  delete_dead_call (gsi);
		  return;
		}

	      enum dse_store_status store_status;
	      m_byte_tracking_enabled
		= setup_live_bytes_from_ref (&ref, m_live_bytes);
	      store_status = dse_classify_store (&ref, stmt,
						 m_byte_tracking_enabled,
						 m_live_bytes);
	      if (store_status == DSE_STORE_LIVE)
		return;

	      if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
		{
		  maybe_trim_memstar_call (&ref, m_live_bytes, stmt);
		  return;
		}

	      if (store_status == DSE_STORE_DEAD)
		delete_dead_call (gsi);
	      return;
	    }

	  default:
	    return;
	}
    }

  if (is_gimple_assign (stmt))
    {
      bool by_clobber_p = false;

      /* Self-assignments are zombies.  */
      if (operand_equal_p (gimple_assign_rhs1 (stmt),
			   gimple_assign_lhs (stmt), 0))
	;
      else
	{
	  m_byte_tracking_enabled
	    = setup_live_bytes_from_ref (&ref, m_live_bytes);
	  enum dse_store_status store_status;
	  store_status = dse_classify_store (&ref, stmt,
					     m_byte_tracking_enabled,
					     m_live_bytes, &by_clobber_p);
	  if (store_status == DSE_STORE_LIVE)
	    return;

	  if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	    {
	      maybe_trim_partially_dead_store (&ref, m_live_bytes, stmt);
	      return;
	    }
	}

      /* Now we know that the store in STMT is dead.  */

      /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
	 another clobber stmt.  */
      if (gimple_clobber_p (stmt)
	  && !by_clobber_p)
	return;

      delete_dead_assignment (gsi);
    }
}

edge
dse_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      dse_optimize_stmt (&gsi);
      if (gsi_end_p (gsi))
	gsi = gsi_last_bb (bb);
      else
	gsi_prev (&gsi);
    }
  return NULL;
}

namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dse != 0; }
  virtual unsigned int execute (function *);

}; // class pass_dse

unsigned int
pass_dse::execute (function *fun)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);

  renumber_gimple_stmt_uids (cfun);

  /* We might consider making this a property of each pass so that it
     can be [re]computed on an as-needed basis.  Particularly since
     this pass could be seen as an extension of DCE which needs post
     dominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a walk of the post-dominator
     tree and a backwards walk of statements within each block.  */
  dse_dom_walker (CDI_POST_DOMINATORS).walk (fun->cfg->x_exit_block_ptr);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      cleanup_tree_cfg ();
    }

  BITMAP_FREE (need_eh_cleanup);

  /* For now, just wipe the post-dominator information.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}