/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "cfgloop.h"
#include "insn-codes.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "tree-into-ssa.h"
#include "cfganal.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all COND_EXPRs that use X
   (possibly more than one).

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a has an integral type and x has a boolean type, or
    x has an integral type and a has a boolean type.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all COND_EXPRs that use X
   (possibly more than one).

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on it):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or

     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast, and a {NOT_EXPR,NEG_EXPR} into a
   subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
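
/* As a purely illustrative (hypothetical) source-level view of the first
   class of transformations above:

     int f (int a, int b)
     {
       _Bool t = a < b;
       if (t)
	 return 1;
       return 0;
     }

   the single-use t is forwarded into the condition, leaving the
   equivalent of "if (a < b)" and making the definition of t dead.  */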

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Blocks whose EH edges may need purging after statement removal.  */
static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */
static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
	{
	  lattice.reserve (num_ssa_names - lattice.length ());
	  lattice.quick_grow_cleared (num_ssa_names);
	}
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}


/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	return def_stmt;
      }
  } while (1);
}
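
/* For illustration, with hypothetical SSA names, given

     b_2 = a_1 + 1;
     c_3 = b_2;
     d_4 = c_3;

   get_prop_source_stmt (d_4, ...) walks through the trivial copies
   d_4 = c_3 and c_3 = b_2 and returns the statement b_2 = a_1 + 1
   as the propagation source.  */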

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of the gassign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			    gimple_assign_rhs2 (stmt),
			    gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}
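
/* For example (hypothetical SSA names): for the assignment
   x_1 = a_2 + b_3, rhs_to_tree (type, stmt) builds and folds the
   GENERIC tree a_2 + b_3 in TYPE, which can then be fed to the
   fold() based combining routines below.  */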

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true, only is_gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, which is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  bool invariant_only_p = !single_use0_p;

	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

	  /* Always combine comparisons or conversions from booleans.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && ((CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
		      == BOOLEAN_TYPE)
		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
	    invariant_only_p = false;

	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, invariant_only_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}
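
/* As a hypothetical example of the combining done above: for

     t_1 = a_2 + 1;
     x_3 = t_1 == 3;

   folding the comparison t_1 == 3 with the defining statement of t_1
   yields a_2 == 2, matching the constant-adjustment case described in
   the overview comment at the top of this file.  */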

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
					     boolean_type_node,
					     rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_gimple_expr (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
	   && integer_zerop (rhs2))
	  || (code == NE_EXPR
	      && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
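
/* For the edge-swapping canonicalization above, a hypothetical example:
   "if (b_1 == 0)" with b_1 of _Bool type becomes "if (b_1 != 0)", and
   the EDGE_TRUE_VALUE/EDGE_FALSE_VALUE flags on the two outgoing edges
   are exchanged so the semantics of the branch are unchanged.  */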


/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
					       TREE_TYPE (cond),
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
	tmp = fold_build2_loc (gimple_location (def_stmt),
			       def_code,
			       TREE_TYPE (cond),
			       gimple_assign_rhs1 (def_stmt),
			       gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
				  : integer_onep (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
	gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
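
/* A hypothetical example for the COND_EXPR case above: given

     c_1 = a_2 < b_3;
     x_4 = c_1 ? y_5 : z_6;

   the single-use c_1 is replaced by the comparison itself, giving
   x_4 = a_2 < b_3 ? y_5 : z_6, and the definition of c_1 becomes
   dead.  */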

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may still have changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
	 so we can remove a conversion, re-materialize the address here
	 and stop.  */
      if (single_use_p
	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	{
	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
	  return true;
	}

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (TYPE_PRECISION (TREE_TYPE (lhs))
	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we can only propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
	 to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  offset_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address, we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
	       && ((gimple_assign_lhs (use_stmt) == lhs
		    && useless_type_conversion_p
			 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
			  TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
		   || types_compatible_p (TREE_TYPE (lhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	       /* Don't forward anything into clobber stmts if it would result
		  in the lhs no longer being a MEM_REF.  */
	       && (!gimple_clobber_p (use_stmt)
		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_lhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *lhsp = new_lhs;
	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs so as not to falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  offset_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address, we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
	       && ((gimple_assign_rhs1 (use_stmt) == rhs
		    && useless_type_conversion_p
			 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
			  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
		   || types_compatible_p (TREE_TYPE (rhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_rhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *rhsp = new_rhs;
	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
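
/* A hypothetical illustration of the constant-offset case above:
   with int x[10] (4-byte int assumed) and GIMPLE

     ptr_1 = &x[0];
     ptr2_2 = ptr_1 p+ 8;

   the ADDR_EXPR is propagated and folded so that ptr2_2 = &x[2],
   recovering array indexing from the pointer arithmetic.  */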

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this at the top level.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
					      single_use_p);
      /* If the use has moved to a different statement adjust
	 the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
	{
	  update_stmt (use_stmt);
	  use_stmt = gsi_stmt (gsi);
	}
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}


/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
	 out-of-range for INDEX_TYPE.  Push one label and let the
	 CFG cleanups deal with this further.  */
      if (len == 0)
	{
	  tree label, elt;

	  label = CASE_LABEL (gimple_switch_default_label (stmt));
	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
	  labels.quick_push (elt);
	  len = 1;
	}

      for (i = 0; i < labels.length (); i++)
	gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
	gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
	{
	  tree elt = gimple_switch_label (stmt, i);
	  basic_block target = label_to_block (CASE_LABEL (elt));
	  bitmap_set_bit (target_blocks, target->index);
	}
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
	{
	  if (! bitmap_bit_p (target_blocks, e->dest->index))
	    {
	      remove_edge (e);
	      cfg_changed = true;
	      free_dominance_info (CDI_DOMINATORS);
	    }
	  else
	    ei_next (&ei);
	}
      BITMAP_FREE (target_blocks);
    }
}
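
/* As a hypothetical example: if the index type is narrowed to unsigned
   char, a case label with value 300 is out of range and is dropped by
   preprocess_case_label_vec_for_gimple, and any outgoing edge that only
   led to such removed cases is deleted above.  */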

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
	{
	  tree def = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (def) != SSA_NAME)
	    return false;

	  /* If we have an extension or sign-change that preserves the
	     values we check against, then we can copy the source value into
	     the switch.  */
	  tree ti = TREE_TYPE (def);
	  if (INTEGRAL_TYPE_P (ti)
	      && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
	    {
	      size_t n = gimple_switch_num_labels (stmt);
	      tree min = NULL_TREE, max = NULL_TREE;
	      if (n > 1)
		{
		  min = CASE_LOW (gimple_switch_label (stmt, 1));
		  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
		    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
		  else
		    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
		}
	      if ((!min || int_fits_type_p (min, ti))
		  && (!max || int_fits_type_p (max, ti)))
		{
		  gimple_switch_set_index (stmt, def);
		  simplify_gimple_switch_label_vec (stmt, ti);
		  update_stmt (stmt);
		  return true;
		}
	    }
	}
    }

  return false;
}
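
/* A hypothetical example of the cast removal above: for

     _2 = (int) c_1;   with c_1 of type unsigned char
     switch (_2) { case 1: ...  case 2: ... }

   all case values fit in unsigned char, so the switch index becomes
   c_1 directly and the now-dead cast can be cleaned up afterwards.  */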

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL_TREE.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on the definition stmt's RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    wide_int_to_tree (sizetype,
						      mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
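
/* For example (hypothetical SSA names): with

     q_1 = &buf[4];
     p_2 = q_1 p+ 12;

   constant_pointer_difference (q_1, p_2) expands both pointers to the
   common base buf with accumulated offsets 4 and 16, finds the shared
   expression, and returns 12.  */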

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!tree_fits_shwi_p (val2)
	      || !tree_fits_uhwi_p (len2)
	      || compare_tree_int (len2, 1024) == 1)
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If the first stmt is a call, it needs to be memcpy
		 or mempcpy, with a string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!tree_fits_uhwi_p (len1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      if (!tree_fits_uhwi_p (off1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_to_uhwi (off1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for a length 1 memcpy optimized into
		 an assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !tree_fits_shwi_p (src1))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than the memcpy length, bail out.  */
	  if (diff == NULL
	      || !tree_fits_uhwi_p (diff)
	      || tree_int_cst_lt (len1, diff)
	      || compare_tree_int (diff, 1024) == 1)
	    break;

	  /* Use the maximum of difference plus memset length and memcpy length
	     as the new memcpy length; if it is too big, bail out.  */
	  src_len = tree_to_uhwi (diff);
	  src_len += tree_to_uhwi (len2);
	  if (src_len < tree_to_uhwi (len1))
	    src_len = tree_to_uhwi (len1);
	  if (src_len > 1024)
	    break;

	  /* If the mempcpy value is used elsewhere, bail out, as mempcpy
	     with a bigger length will return a different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between the memcpy and memset
	     calls, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
		    tree_to_uhwi (len1));
	  else
	    src_buf[0] = tree_to_shwi (src1);
	  memset (src_buf + tree_to_uhwi (diff),
		  tree_to_shwi (val2), tree_to_uhwi (len2));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handles embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		{
		  fwprop_invalidate_lattice (lhs1);
		  release_ssa_name (lhs1);
		}
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2,
				      builtin_decl_explicit (BUILT_IN_MEMCPY));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}
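
/* A hypothetical example of the single-store case above: the
   source-level sequence

     *p = 'A';
     memset (p + 1, ' ', 2);

   (the store being a length 1 memcpy already optimized into an
   assignment) is merged into memcpy (p, "A  ", 3), provided the
   literal can be stored by pieces.  */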

/* Given an ssa_name in NAME, see if it was defined by an assignment and
   set *CODE to the code and *ARG1 and *ARG2 to the first and second
   operands on the rhs.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
	  && can_propagate_from (def))
	{
	  code1 = gimple_assign_rhs_code (def);
	  arg11 = gimple_assign_rhs1 (def);
	  arg21 = gimple_assign_rhs2 (def);
	  arg31 = gimple_assign_rhs3 (def);
	}
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
	   || grhs_class == GIMPLE_BINARY_RHS
	   || grhs_class == GIMPLE_UNARY_RHS
	   || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}
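
/* For instance (hypothetical SSA names), if x_1 is defined by
   x_1 = y_2 << 3, then defcodefor_name (x_1, &code, &arg1, &arg2)
   sets code to LSHIFT_EXPR, arg1 to y_2 and arg2 to 3; this is what
   simplify_rotate below uses to pattern-match the shift pairs.  */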


/* Recognize rotation patterns.  Return true if a transformation
   was applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might even be a signed type, but should have precision B.  */
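
/* For example, with a 32-bit unsigned x, the source-level idiom
   (x << n) | (x >> ((-n) & 31)) matches the last group of patterns
   above and is turned into the single rotate x r<< n.  */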

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
	{
	  arg[i] = def_arg1[i];
	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
	}
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
	tree tem;
	enum tree_code code;
	defcodefor_name (def_arg1[i], &code, &tem, NULL);
	if (!CONVERT_EXPR_CODE_P (code)
	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
	  return false;
	def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
1635  else
1636    {
1637      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1638      enum tree_code cdef_code[2];
      /* Look through conversions of the shift count argument.
	 The C/C++ FE casts any shift count argument to integer_type_node.
	 The only problem might be if the shift count type's maximum value
	 is equal to or smaller than the number of bits in rtype.  */
      for (i = 0; i < 2; i++)
	{
	  def_arg2_alt[i] = def_arg2[i];
	  defcodefor_name (def_arg2[i], &cdef_code[i],
			   &cdef_arg1[i], &cdef_arg2[i]);
	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 > floor_log2 (TYPE_PRECISION (rtype))
	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
	    {
	      def_arg2_alt[i] = cdef_arg1[i];
	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
			       &cdef_arg1[i], &cdef_arg2[i]);
	    }
	}
      for (i = 0; i < 2; i++)
	/* Check for one shift count being Y and the other B - Y,
	   with optional casts.  */
	if (cdef_code[i] == MINUS_EXPR
	    && tree_fits_shwi_p (cdef_arg1[i])
	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
	  {
	    tree tem;
	    enum tree_code code;

	    if (cdef_arg2[i] == def_arg2[1 - i]
		|| cdef_arg2[i] == def_arg2_alt[1 - i])
	      {
		rotcnt = cdef_arg2[i];
		break;
	      }
	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		 > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		&& (tem == def_arg2[1 - i]
		    || tem == def_arg2_alt[1 - i]))
	      {
		rotcnt = tem;
		break;
	      }
	  }
	/* The above sequence isn't safe when Y is 0, because then one
	   of the shifts triggers undefined behavior.  This alternative
	   is safe even for a rotation count of 0.  One shift count is Y
	   and the other (-Y) & (B - 1).  */
	else if (cdef_code[i] == BIT_AND_EXPR
		 && tree_fits_shwi_p (cdef_arg2[i])
		 && tree_to_shwi (cdef_arg2[i])
		    == TYPE_PRECISION (rtype) - 1
		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
	  {
	    tree tem;
	    enum tree_code code;

	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
	    if (CONVERT_EXPR_CODE_P (code)
		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		 > floor_log2 (TYPE_PRECISION (rtype))
		&& TYPE_PRECISION (TREE_TYPE (tem))
		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
	      defcodefor_name (tem, &code, &tem, NULL);

	    if (code == NEGATE_EXPR)
	      {
		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
		  {
		    rotcnt = tem;
		    break;
		  }
		defcodefor_name (tem, &code, &tem, NULL);
		if (CONVERT_EXPR_CODE_P (code)
		    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       > floor_log2 (TYPE_PRECISION (rtype))
		    && TYPE_PRECISION (TREE_TYPE (tem))
		       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
		    && (tem == def_arg2[1 - i]
			|| tem == def_arg2_alt[1 - i]))
		  {
		    rotcnt = tem;
		    break;
		  }
	      }
	  }
      if (rotcnt == NULL_TREE)
	return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
				  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
			       NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
			   ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
			   ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}

/* Combine an element access with a shuffle.  Returns true if any
   changes were made, false otherwise.  */
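
/* For instance (a sketch; names are illustrative, four-element vectors
   of 32-bit elements assumed):

     v_1 = VEC_PERM_EXPR <a_2, b_3, { 4, 5, 6, 7 }>;
     x_4 = BIT_FIELD_REF <v_1, 32, 0>;

   element 0 of the permutation selects element 4, i.e. b_3[0], so the
   access is rewritten to x_4 = BIT_FIELD_REF <b_3, 32, 0>.  */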

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
			       gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
	return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
	return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
	{
	  p = gimple_assign_rhs1 (def_stmt);
	}
      else
	{
	  p = gimple_assign_rhs2 (def_stmt);
	  idx -= nelts;
	}
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}

/* Determine whether applying the two permutations (mask1 then mask2)
   gives back one of the inputs.  */
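
/* For instance (a sketch, with nelts == 4): if mask1 and mask2 are
   both { 3, 2, 1, 0 }, the combined selection is mask1[mask2[i]] == i
   for every i, i.e. the identity on the first input, so the function
   returns 1.  */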

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
		       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
	maybe_identity2 = false;
      else if (j == i + nelts)
	maybe_identity1 = false;
      else
	return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}

/* Combine a shuffle with its arguments.  Returns 1 if any changes were
   made, 2 if cfg-cleanup needs to run.  Otherwise returns 0.  */
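
/* For instance (a sketch; names are illustrative), given

     t_1 = VEC_PERM_EXPR <a_2, b_3, { 3, 2, 1, 0 }>;
     r_4 = VEC_PERM_EXPR <t_1, t_1, { 3, 2, 1, 0 }>;

   the two masks combine to the identity on the first input, so r_4 is
   rewritten to a plain copy of a_2 and t_1 may become dead.  */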

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
	{
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      enum tree_code code2;

	      gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
		return 0;

	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
	return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}

/* Recognize a VEC_PERM_EXPR.  Returns true if any changes were made.  */
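
/* For instance (a sketch, with four-element vectors; names are
   illustrative): a CONSTRUCTOR of BIT_FIELD_REFs of a single vector
   v_1, such as

     r_2 = { v_1[1], v_1[0], v_1[3], v_1[2] };

   is rewritten to r_2 = VEC_PERM_EXPR <v_1, v_1, { 1, 0, 3, 2 }>;
   where the target supports the permutation, or to a plain copy of
   v_1 when the selection is the identity.  */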

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
	{
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
	    return false;
	  orig = ref;
	}
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
	return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i)
	maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
	     != GET_MODE_SIZE (TYPE_MODE (type)))
	return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}


/* Primitive "lattice" function for gimple_simplify.  */
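/* For instance, after x_1 = 5 the lattice records 5 as the value of
   x_1, so fwprop_ssa_val (x_1) returns 5 and gimple_simplify can match
   its patterns against the constant.  */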

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
	name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop

unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = inverted_post_order_compute (postorder);
  auto_vec<gimple, 4> to_fixup;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
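	  /* For instance (a sketch; names are illustrative):
	       p_1 = &a.b;
	       x_2 = *p_1;
	     can become x_2 = a.b, after which p_1 may become dead.  */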
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ???  Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
	         component-wise loads.  */
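	      /* For instance (a sketch; names are illustrative), a
		 complex load
		   t_1 = *p_2;
		   r_3 = REALPART_EXPR <t_1>;
		   i_4 = IMAGPART_EXPR <t_1>;
		 becomes
		   r_3 = REALPART_EXPR <*p_2>;
		   i_4 = IMAGPART_EXPR <*p_2>;  */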
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
	         to component-wise stores.  */
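	      /* For instance (a sketch; names are illustrative):
		   t_1 = COMPLEX_EXPR <r_2, i_3>;
		   *p_4 = t_1;
		 becomes
		   REALPART_EXPR <*p_4> = r_2;
		   IMAGPART_EXPR <*p_4> = i_3;  */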
	      use_operand_p use_p;
	      gimple use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple new_stmt = gimple_build_assign (new_lhs, rhs);
		  location_t loc = gimple_location (use_stmt);
		  gimple_set_location (new_stmt, loc);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  gimple orig_stmt = stmt;
	  bool changed = false;
	  bool was_noreturn = (is_gimple_call (stmt)
			       && gimple_call_noreturn_p (stmt));

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  if (fold_stmt (&gsi, fwprop_ssa_val))
	    {
	      changed = true;
	      stmt = gsi_stmt (gsi);
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		bitmap_set_bit (to_purge, bb->index);
	      if (!was_noreturn
		  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
		to_fixup.safe_push (stmt);
	      /* Clean up the CFG if we simplified a condition to
	         true or false.  */
	      if (gcond *cond = dyn_cast <gcond *> (stmt))
		if (gimple_cond_true_p (cond)
		    || gimple_cond_false_p (cond))
		  cfg_changed = true;
	      update_stmt (stmt);
	    }

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if (code == COND_EXPR
		    || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1.  */
		    if (forward_propagate_into_cond (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if ((code == PLUS_EXPR
			  || code == BIT_IOR_EXPR
			  || code == BIT_XOR_EXPR)
			 && simplify_rotate (&gsi))
		  changed = true;
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
	      break;

	    case GIMPLE_COND:
	      {
		int did_something
		  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);

	      /* Fill up the lattice.  */
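	      /* E.g. for a copy x_1 = y_2; record y_2 (or y_2's own
		 lattice value) as the value of x_1, so later stmts can
		 see through the copy.  */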
	      if (gimple_assign_single_p (stmt))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree val = lhs;
		      if (TREE_CODE (rhs) == SSA_NAME)
			val = fwprop_ssa_val (rhs);
		      else if (is_gimple_min_invariant (rhs))
			val = rhs;
		      fwprop_set_lattice_val (lhs, val);
		    }
		}

	      gsi_next (&gsi);
	    }
	}
    }
  free (postorder);
  lattice.release ();

  /* Fix up stmts that became noreturn calls.  This may require
     splitting blocks and thus isn't possible during the stmt walk.
     Do this in reverse order so we don't inadvertently remove a stmt
     we want to fix up by visiting a dominating now-noreturn call
     first.  */
  while (!to_fixup.is_empty ())
    {
      gimple stmt = to_fixup.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}