/* Tree inlining.
   Copyright (C) 2001-2022 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfganal.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "debug.h"
#include "value-prof.h"
#include "cfgloop.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "sreal.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-live.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
#include "symtab-clones.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given decl map; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
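
/* A schematic example of the inlining transformation described above
   (an editorial sketch, not output of this file): given

       int f (int p) { return p + 1; }    ...    r = f (x);

   the remapped body conceptually becomes

       p.1 = x;             <- PARM_DECL remapped to a VAR_DECL
       retval.2 = p.1 + 1;  <- RETURN_EXPR becomes a MODIFY_EXPR
       r = retval.2;        <- store to the returned-value variable

   with the branch semantics of the return handled by the CFG.  The
   temporary names here are hypothetical.  */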

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this map is used for more than
   that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
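
/* Usage sketch (with hypothetical trees): after

       insert_decl_map (id, old_parm, new_var);

   the map contains both old_parm -> new_var and new_var -> new_var,
   so if the already-remapped new_var is encountered again it maps to
   itself instead of being duplicated a second time.  */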

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    {
      /* When we perform edge redirection as part of CFG copy, IPA-SRA can
	 remove an unused LHS from a call statement.  Such an LHS can however
	 still appear in debug statements, but its value is lost in this
	 function and we do not want to map it.  */
      if (id->killed_new_ssa_names
	  && id->killed_new_ssa_names->contains (*n))
	{
	  gcc_assert (processing_debug_stmt);
	  processing_debug_stmt = -1;
	  return name;
	}

      return unshare_expr (*n);
    }

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  gimple *def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL
	      && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  n = id->decl_map->get (val);
	  if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
	    return *n;
	  tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
	  /* FIXME: Is setting the mode really necessary?  */
	  SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  insert_decl_map (id, val, vexpr);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && VAR_P (var)
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      /* So can range-info.  */
      if (!POINTER_TYPE_P (TREE_TYPE (name))
	  && SSA_NAME_RANGE_INFO (name))
	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
				       SSA_NAME_RANGE_INFO (name));
      return new_tree;
    }

  /* Do not set DEF_STMT yet, as the statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL with the variable
     during inlining: this saves us from the need to introduce a PHI node
     when the return value is only partly initialized.  */
  if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && POINTER_TYPE_P (TREE_TYPE (name))
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      /* So can range-info.  */
      if (!POINTER_TYPE_P (TREE_TYPE (name))
	  && SSA_NAME_RANGE_INFO (name))
	duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
				       SSA_NAME_RANGE_INFO (name));
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function with an uninitialized variable, we might
	     extend its lifetime (the variable might get reused).  This causes
	     an ICE if we end up extending the lifetime of an SSA name across
	     an abnormal edge, and it also increases register pressure.

	     We simply initialize all uninitialized vars to 0, except when
	     we are inlining into the very first BB.  We could avoid this for
	     all BBs that are not inside strongly connected regions of the
	     CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple *init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	{
	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
	     is not set on the TYPE_DECL, for example in LTO mode.  */
	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
	    {
	      tree x = build_variant_type_copy (TREE_TYPE (t));
	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
	      DECL_ORIGINAL_TYPE (t) = x;
	    }
	}

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
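
/* Usage sketch for the above: the first remap_decl (old_var, id) calls
   id->copy_decl and records old_var -> copy in id->decl_map; every
   later call for the same old_var returns that recorded copy, unshared
   unless id->do_not_unshare is set.  */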

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					    TYPE_MODE (type),
					    TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing between a type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type)
			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
	  /* For array bounds where we have decided not to copy over the bounds
	     variable which isn't used in OpenMP/OpenACC region, change them to
	     an uninitialized VAR_DECL temporary.  */
	  if (id->adjust_array_error_bounds
	      && TYPE_DOMAIN (new_tree)
	      && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
	    {
	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
	      DECL_ATTRIBUTES (v)
		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
			     DECL_ATTRIBUTES (v));
	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
	    }
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of a type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
			   || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
			   || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

/* Helper function for remap_type_2, called through walk_tree.  */

static tree
remap_type_3 (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
    return *tp;

  return NULL_TREE;
}

/* Return true if TYPE needs to be remapped because remap_decl on any
   needed embedded decl returns something other than that decl.  */

static bool
remap_type_2 (tree type, copy_body_data *id)
{
  tree t;

#define RETURN_TRUE_IF_VAR(T) \
  do								\
    {								\
      tree _t = (T);						\
      if (_t)							\
	{							\
	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
	    return true;					\
	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
	      && walk_tree (&_t, remap_type_3, id, NULL))	\
	    return true;					\
	}							\
    }								\
  while (0)

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FUNCTION_TYPE:
    case METHOD_TYPE:
      return remap_type_2 (TREE_TYPE (type), id);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      return false;

    case ARRAY_TYPE:
      if (remap_type_2 (TREE_TYPE (type), id)
	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
	return true;
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    default:
      return false;
    }

  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
  return false;
#undef RETURN_TRUE_IF_VAR
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn)
      /* Don't remap if the copy_decl method doesn't always return a new
	 decl and, for all embedded decls, returns the passed-in decl.  */
      || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
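
/* Illustration (editorial sketch): in a body like

       void f (int n) { int a[n]; ... }

   the type of "a" is variably modified because its size depends on the
   PARM_DECL "n", so remap_type must build a copy of the array type that
   refers to the remapped "n"; a fixed-size type such as int[4] is
   returned unchanged via the identity mapping above.  */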

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
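
/* For example, a function-local "static int counter;" is not an auto
   variable of id->src_fn, so it stays nonlocal: every copy of the body
   keeps referring to the one original declaration rather than
   duplicating it.  */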

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */

static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier by not
     swapping their order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */

static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

/* Remap the location info pointed to by LOCUS.  */

static location_t
remap_location (location_t locus, copy_body_data *id)
{
  if (LOCATION_BLOCK (locus))
    {
      tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
      gcc_assert (n);
      if (*n)
	return set_block (locus, *n);
    }

  locus = LOCATION_LOCUS (locus);

  if (locus != UNKNOWN_LOCATION && id->block)
    return set_block (locus, id->block);

  return locus;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0 || processing_debug_stmt)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    {
      /* Clique 1 is reserved for local ones set by PTA.  */
      if (cfun->last_clique == 0)
	cfun->last_clique = 1;
      newc = ++cfun->last_clique;
    }
  return newc;
}
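
/* Worked sketch (numbers hypothetical): if the source body used clique 3
   and cfun->last_clique in the destination is 5, the first remap of 3
   allocates clique 6 and caches 3 -> 6, so all later occurrences of
   clique 3 from this copy map to 6 as well.  */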

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  copy_warning (*tp, old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
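
/* Schematic example of the MEM_REF re-canonicalization above (editorial
   sketch): if the argument bound to parameter "p" was "&v", a body
   reference "MEM[p, 4]" is remapped to "MEM[&v, 4]", which fold_build2
   can canonicalize so later passes see an access to "v" directly.  */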


/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment to the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in *n, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  type = remap_type (type, id);
		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  copy_warning (*tp, old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
	      && !id->do_not_fold)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
	      *tp = t;
	    }
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
      else if (TREE_CODE (*tp) == OMP_CLAUSE
	       && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
		   || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
	{
	  tree t = OMP_CLAUSE_DECL (*tp);
	  if (t
	      && TREE_CODE (t) == TREE_LIST
	      && TREE_PURPOSE (t)
	      && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	    {
	      *walk_subtrees = 0;
	      OMP_CLAUSE_DECL (*tp) = copy_node (t);
	      t = OMP_CLAUSE_DECL (*tp);
	      TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
	      for (int i = 0; i <= 4; i++)
		walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
			   copy_tree_body_r, id, NULL);
	      if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
		remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
	      walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
	    }
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
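
/* For example (numbers hypothetical): if a copied statement referred to
   EH region 2 of the source function and the duplicated region received
   index 7 in the destination, remap_eh_region_tree_nr rewrites the
   INTEGER_CST 2 into 7.  */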

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
  gimple *copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && (gimple_debug_nonbind_marker_p (stmt)
	  ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
	  : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
    return NULL;

  if (!is_gimple_debug (stmt)
      && id->param_body_adjs
      && id->param_body_adjs->m_dead_stmts.contains (stmt))
    {
      tree *dval = id->param_body_adjs->m_dead_stmt_debug_equiv.get (stmt);
      if (!dval)
	return NULL;

      gcc_assert (is_gimple_assign (stmt));
      tree lhs = gimple_assign_lhs (stmt);
      tree *dvar = id->param_body_adjs->m_dead_ssa_debug_equiv.get (lhs);
      gdebug *bind = gimple_build_debug_bind (*dvar, *dval, stmt);
      if (id->reset_location)
	gimple_set_location (bind, input_location);
      id->debug_stmts.safe_push (bind);
      gimple_seq_add_stmt (&stmts, bind);
      return stmts;
    }

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURN should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));

      /* If we're returning something, just turn that into an
	 assignment to the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  Likewise for when the
	 call doesn't want the return value.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (!id->call_stmt
		  || gimple_call_lhs (id->call_stmt) != NULL_TREE)
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return NULL;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
		     (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_MASKED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_masked
		   (s1, gimple_omp_masked_clauses (stmt));
	  break;

	case GIMPLE_OMP_SCOPE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_scope
		   (s1, gimple_omp_scope_clauses (stmt));
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup
		   (s1, gimple_omp_taskgroup_clauses (stmt));
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered
		   (s1,
		    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
	  break;

	case GIMPLE_OMP_SCAN:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_scan
		   (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
1724	  break;
1725
1726	case GIMPLE_OMP_TEAMS:
1727	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1728	  copy = gimple_build_omp_teams
1729		   (s1, gimple_omp_teams_clauses (stmt));
1730	  break;
1731
1732	case GIMPLE_OMP_CRITICAL:
1733	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1734	  copy = gimple_build_omp_critical (s1,
1735					    gimple_omp_critical_name
1736					      (as_a <gomp_critical *> (stmt)),
1737					    gimple_omp_critical_clauses
1738					      (as_a <gomp_critical *> (stmt)));
1739	  break;
1740
1741	case GIMPLE_TRANSACTION:
1742	  {
1743	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1744	    gtransaction *new_trans_stmt;
1745	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1746				   id);
1747	    copy = new_trans_stmt = gimple_build_transaction (s1);
1748	    gimple_transaction_set_subcode (new_trans_stmt,
1749	      gimple_transaction_subcode (old_trans_stmt));
1750	    gimple_transaction_set_label_norm (new_trans_stmt,
1751	      gimple_transaction_label_norm (old_trans_stmt));
1752	    gimple_transaction_set_label_uninst (new_trans_stmt,
1753	      gimple_transaction_label_uninst (old_trans_stmt));
1754	    gimple_transaction_set_label_over (new_trans_stmt,
1755	      gimple_transaction_label_over (old_trans_stmt));
1756	  }
1757	  break;
1758
1759	default:
1760	  gcc_unreachable ();
1761	}
1762    }
1763  else
1764    {
1765      if (gimple_assign_copy_p (stmt)
1766	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1767	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1768	{
1769	  /* Here we handle statements that are not completely rewritten.
1770	     First we detect some inlining-induced bogosities for
1771	     discarding.  */
1772
1773	  /* Some assignments VAR = VAR; don't generate any rtl code
1774	     and thus don't count as variable modification.  Avoid
1775	     keeping bogosities like 0 = 0.  */
1776	  tree decl = gimple_assign_lhs (stmt), value;
1777	  tree *n;
1778
1779	  n = id->decl_map->get (decl);
1780	  if (n)
1781	    {
1782	      value = *n;
1783	      STRIP_TYPE_NOPS (value);
1784	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1785		return NULL;
1786	    }
1787	}
1788
      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
1792      if (id->blocks_to_copy && gimple_clobber_p (stmt))
1793	{
1794	  tree lhs = gimple_assign_lhs (stmt);
1795	  if (TREE_CODE (lhs) == MEM_REF
1796	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1797	    {
1798	      gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1799	      if (gimple_bb (def_stmt)
1800		  && !bitmap_bit_p (id->blocks_to_copy,
1801				    gimple_bb (def_stmt)->index))
1802		return NULL;
1803	    }
1804	}
1805
      /* We do not allow CLOBBERs of handled components.  In case the
	 returned value is stored via such a handled component, remove
	 the clobber so the stmt verifier is happy.  */
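      /* E.g. when the caller stores the returned value directly into a
	 field, say DST.field (a hypothetical name), the RESULT_DECL
	 remaps to that handled component, which is not a valid CLOBBER
	 lhs, so the clobber is dropped.  */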
1809      if (gimple_clobber_p (stmt)
1810	  && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1811	{
1812	  tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1813	  if (!DECL_P (remapped)
1814	      && TREE_CODE (remapped) != MEM_REF)
1815	    return NULL;
1816	}
1817
1818      if (gimple_debug_bind_p (stmt))
1819	{
1820	  tree var = gimple_debug_bind_get_var (stmt);
1821	  tree value = gimple_debug_bind_get_value (stmt);
1822	  if (id->param_body_adjs
1823	      && id->param_body_adjs->m_dead_stmts.contains (stmt))
1824	    {
1825	      value = unshare_expr_without_location (value);
1826	      id->param_body_adjs->remap_with_debug_expressions (&value);
1827	    }
1828
1829	  gdebug *copy = gimple_build_debug_bind (var, value, stmt);
1830	  if (id->reset_location)
1831	    gimple_set_location (copy, input_location);
1832	  id->debug_stmts.safe_push (copy);
1833	  gimple_seq_add_stmt (&stmts, copy);
1834	  return stmts;
1835	}
1836      if (gimple_debug_source_bind_p (stmt))
1837	{
1838	  gdebug *copy = gimple_build_debug_source_bind
1839	                   (gimple_debug_source_bind_get_var (stmt),
1840			    gimple_debug_source_bind_get_value (stmt),
1841			    stmt);
1842	  if (id->reset_location)
1843	    gimple_set_location (copy, input_location);
1844	  id->debug_stmts.safe_push (copy);
1845	  gimple_seq_add_stmt (&stmts, copy);
1846	  return stmts;
1847	}
1848      if (gimple_debug_nonbind_marker_p (stmt))
1849	{
1850	  /* If the inlined function has too many debug markers,
1851	     don't copy them.  */
1852	  if (id->src_cfun->debug_marker_count
1853	      > param_max_debug_marker_count
1854	      || id->reset_location)
1855	    return stmts;
1856
1857	  gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1858	  id->debug_stmts.safe_push (copy);
1859	  gimple_seq_add_stmt (&stmts, copy);
1860	  return stmts;
1861	}
1862
1863      /* Create a new deep copy of the statement.  */
1864      copy = gimple_copy (stmt);
1865
1866      /* Clear flags that need revisiting.  */
1867      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1868        {
1869	  if (gimple_call_tail_p (call_stmt))
1870	    gimple_call_set_tail (call_stmt, false);
1871	  if (gimple_call_from_thunk_p (call_stmt))
1872	    gimple_call_set_from_thunk (call_stmt, false);
1873	  if (gimple_call_internal_p (call_stmt))
1874	    switch (gimple_call_internal_fn (call_stmt))
1875	      {
1876	      case IFN_GOMP_SIMD_LANE:
1877	      case IFN_GOMP_SIMD_VF:
1878	      case IFN_GOMP_SIMD_LAST_LANE:
1879	      case IFN_GOMP_SIMD_ORDERED_START:
1880	      case IFN_GOMP_SIMD_ORDERED_END:
1881		DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1882	        break;
1883	      default:
1884		break;
1885	      }
1886	}
1887
1888      /* Remap the region numbers for __builtin_eh_{pointer,filter},
1889	 RESX and EH_DISPATCH.  */
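      /* For example, in a callee statement

	     _1 = __builtin_eh_pointer (5);

	 region number 5 is translated through ID->eh_map into the number
	 of the corresponding duplicated region in the caller.  */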
1890      if (id->eh_map)
1891	switch (gimple_code (copy))
1892	  {
1893	  case GIMPLE_CALL:
1894	    {
1895	      tree r, fndecl = gimple_call_fndecl (copy);
1896	      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1897		switch (DECL_FUNCTION_CODE (fndecl))
1898		  {
1899		  case BUILT_IN_EH_COPY_VALUES:
1900		    r = gimple_call_arg (copy, 1);
1901		    r = remap_eh_region_tree_nr (r, id);
1902		    gimple_call_set_arg (copy, 1, r);
1903		    /* FALLTHRU */
1904
1905		  case BUILT_IN_EH_POINTER:
1906		  case BUILT_IN_EH_FILTER:
1907		    r = gimple_call_arg (copy, 0);
1908		    r = remap_eh_region_tree_nr (r, id);
1909		    gimple_call_set_arg (copy, 0, r);
1910		    break;
1911
1912		  default:
1913		    break;
1914		  }
1915
1916	      /* Reset alias info if we didn't apply measures to
1917		 keep it valid over inlining by setting DECL_PT_UID.  */
1918	      if (!id->src_cfun->gimple_df
1919		  || !id->src_cfun->gimple_df->ipa_pta)
1920		gimple_call_reset_alias_info (as_a <gcall *> (copy));
1921	    }
1922	    break;
1923
1924	  case GIMPLE_RESX:
1925	    {
1926	      gresx *resx_stmt = as_a <gresx *> (copy);
1927	      int r = gimple_resx_region (resx_stmt);
1928	      r = remap_eh_region_nr (r, id);
1929	      gimple_resx_set_region (resx_stmt, r);
1930	    }
1931	    break;
1932
1933	  case GIMPLE_EH_DISPATCH:
1934	    {
1935	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1936	      int r = gimple_eh_dispatch_region (eh_dispatch);
1937	      r = remap_eh_region_nr (r, id);
1938	      gimple_eh_dispatch_set_region (eh_dispatch, r);
1939	    }
1940	    break;
1941
1942	  default:
1943	    break;
1944	  }
1945    }
1946
1947  /* If STMT has a block defined, map it to the newly constructed block.  */
1948  if (tree block = gimple_block (copy))
1949    {
1950      tree *n;
1951      n = id->decl_map->get (block);
1952      gcc_assert (n);
1953      gimple_set_block (copy, *n);
1954    }
1955  if (id->param_body_adjs)
1956    {
1957      gimple_seq extra_stmts = NULL;
1958      id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
1959      if (!gimple_seq_empty_p (extra_stmts))
1960	{
1961	  memset (&wi, 0, sizeof (wi));
1962	  wi.info = id;
1963	  for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1964	       !gsi_end_p (egsi);
1965	       gsi_next (&egsi))
1966	    walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1967	  gimple_seq_add_seq (&stmts, extra_stmts);
1968	}
1969    }
1970
1971  if (id->reset_location)
1972    gimple_set_location (copy, input_location);
1973
1974  /* Debug statements ought to be rebuilt and not copied.  */
1975  gcc_checking_assert (!is_gimple_debug (copy));
1976
1977  /* Remap all the operands in COPY.  */
1978  memset (&wi, 0, sizeof (wi));
1979  wi.info = id;
1980  if (skip_first)
1981    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1982  else
1983    walk_gimple_op (copy, remap_gimple_op_r, &wi);
1984
1985  /* Clear the copied virtual operands.  We are not remapping them here
1986     but are going to recreate them from scratch.  */
1987  if (gimple_has_mem_ops (copy))
1988    {
1989      gimple_set_vdef (copy, NULL_TREE);
1990      gimple_set_vuse (copy, NULL_TREE);
1991    }
1992
1993  if (cfun->can_throw_non_call_exceptions)
1994    {
      /* When inlining a function which does not have non-call exceptions
	 enabled into a function that has (which only happens with
	 always-inline) we have to fix up stmts that cannot throw.  */
1998      if (gcond *cond = dyn_cast <gcond *> (copy))
1999	if (gimple_could_trap_p (cond))
2000	  {
2001	    gassign *cmp
2002	      = gimple_build_assign (make_ssa_name (boolean_type_node),
2003				     gimple_cond_code (cond),
2004				     gimple_cond_lhs (cond),
2005				     gimple_cond_rhs (cond));
2006	    gimple_seq_add_stmt (&stmts, cmp);
2007	    gimple_cond_set_code (cond, NE_EXPR);
2008	    gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
2009	    gimple_cond_set_rhs (cond, boolean_false_node);
2010	  }
2011    }
2012
2013  gimple_seq_add_stmt (&stmts, copy);
2014  return stmts;
2015}
2016
2017
/* Copy basic block, scale profile accordingly.  Edges will be taken
   care of later.  */
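/* A note on the scaling below: a copied block's count becomes
   bb->count * NUM / DEN, where for inlining NUM is typically the
   profile count of the call site and DEN the entry count of the
   callee.  */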
2020
2021static basic_block
2022copy_bb (copy_body_data *id, basic_block bb,
2023         profile_count num, profile_count den)
2024{
2025  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2026  basic_block copy_basic_block;
2027  tree decl;
2028  basic_block prev;
2029
2030  profile_count::adjust_for_ipa_scaling (&num, &den);
2031
2032  /* Search for previous copied basic block.  */
2033  prev = bb->prev_bb;
2034  while (!prev->aux)
2035    prev = prev->prev_bb;
2036
2037  /* create_basic_block() will append every new block to
2038     basic_block_info automatically.  */
2039  copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2040  copy_basic_block->count = bb->count.apply_scale (num, den);
2041
2042  copy_gsi = gsi_start_bb (copy_basic_block);
2043
2044  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2045    {
2046      gimple_seq stmts;
2047      gimple *stmt = gsi_stmt (gsi);
2048      gimple *orig_stmt = stmt;
2049      gimple_stmt_iterator stmts_gsi;
2050      bool stmt_added = false;
2051
2052      id->regimplify = false;
2053      stmts = remap_gimple_stmt (stmt, id);
2054
2055      if (gimple_seq_empty_p (stmts))
2056	continue;
2057
2058      seq_gsi = copy_gsi;
2059
2060      for (stmts_gsi = gsi_start (stmts);
2061	   !gsi_end_p (stmts_gsi); )
2062	{
2063	  stmt = gsi_stmt (stmts_gsi);
2064
2065	  /* Advance iterator now before stmt is moved to seq_gsi.  */
2066	  gsi_next (&stmts_gsi);
2067
2068	  if (gimple_nop_p (stmt))
2069	      continue;
2070
2071	  gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2072					    orig_stmt);
2073
	  /* With return slot optimization we can end up with
	     non-gimple (foo *)&this->m; fix that here.  */
2076	  if (is_gimple_assign (stmt)
2077	      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2078	      && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2079	    {
2080	      tree new_rhs;
2081	      new_rhs = force_gimple_operand_gsi (&seq_gsi,
2082						  gimple_assign_rhs1 (stmt),
2083						  true, NULL, false,
2084						  GSI_CONTINUE_LINKING);
2085	      gimple_assign_set_rhs1 (stmt, new_rhs);
2086	      id->regimplify = false;
2087	    }
2088
2089	  gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2090
2091	  if (id->regimplify)
2092	    gimple_regimplify_operands (stmt, &seq_gsi);
2093
2094	  stmt_added = true;
2095	}
2096
2097      if (!stmt_added)
2098	continue;
2099
      /* If copy_basic_block was empty at the start of this iteration,
	 call gsi_start_bb again to get at the newly added statements.  */
2102      if (gsi_end_p (copy_gsi))
2103	copy_gsi = gsi_start_bb (copy_basic_block);
2104      else
2105	gsi_next (&copy_gsi);
2106
      /* Process the new statement.  The call to gimple_regimplify_operands
	 possibly turned the statement into multiple statements; we
	 need to process all of them.  */
2110      do
2111	{
2112	  tree fn;
2113	  gcall *call_stmt;
2114
2115	  stmt = gsi_stmt (copy_gsi);
2116	  call_stmt = dyn_cast <gcall *> (stmt);
2117	  if (call_stmt
2118	      && gimple_call_va_arg_pack_p (call_stmt)
2119	      && id->call_stmt
2120	      && ! gimple_call_va_arg_pack_p (id->call_stmt))
2121	    {
2122	      /* __builtin_va_arg_pack () should be replaced by
2123		 all arguments corresponding to ... in the caller.  */
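	      /* A sketch, assuming the callee has one named parameter
		 and was inlined from a call 'foo (fixed, a, b, c)':
		 the callee statement

		     bar (1, __builtin_va_arg_pack ());

		 is rebuilt below as

		     bar (1, a, b, c);  */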
2124	      tree p;
2125	      gcall *new_call;
2126	      vec<tree> argarray;
2127	      size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2128	      size_t nargs = nargs_caller;
2129
2130	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2131		{
		  /* Avoid crashing on invalid IL that doesn't have a
		     varargs function or that doesn't pass enough arguments.  */
2134		  if (nargs == 0)
2135		    break;
2136		  nargs--;
2137		}
2138
2139	      /* Create the new array of arguments.  */
2140	      size_t nargs_callee = gimple_call_num_args (call_stmt);
2141	      size_t n = nargs + nargs_callee;
2142	      argarray.create (n);
2143	      argarray.safe_grow_cleared (n, true);
2144
2145	      /* Copy all the arguments before '...'  */
2146	      if (nargs_callee)
2147		memcpy (argarray.address (),
2148			gimple_call_arg_ptr (call_stmt, 0),
2149			nargs_callee * sizeof (tree));
2150
2151	      /* Append the arguments passed in '...'  */
2152	      if (nargs)
2153		memcpy (argarray.address () + nargs_callee,
2154			gimple_call_arg_ptr (id->call_stmt, 0)
2155			+ (nargs_caller - nargs), nargs * sizeof (tree));
2156
2157	      new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2158						argarray);
2159
2160	      argarray.release ();
2161
2162	      /* Copy all GIMPLE_CALL flags, location and block, except
2163		 GF_CALL_VA_ARG_PACK.  */
2164	      gimple_call_copy_flags (new_call, call_stmt);
2165	      gimple_call_set_va_arg_pack (new_call, false);
2166	      gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2167	      /* location includes block.  */
2168	      gimple_set_location (new_call, gimple_location (stmt));
2169	      gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2170
2171	      gsi_replace (&copy_gsi, new_call, false);
2172	      stmt = new_call;
2173	    }
2174	  else if (call_stmt
2175		   && id->call_stmt
2176		   && (decl = gimple_call_fndecl (stmt))
2177		   && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2178	    {
2179	      /* __builtin_va_arg_pack_len () should be replaced by
2180		 the number of anonymous arguments.  */
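	      /* E.g. assuming the callee has one named parameter and
		 was inlined from a call 'foo (fixed, a, b, c)', the
		 call is replaced by the constant 3; if the caller
		 itself uses __builtin_va_arg_pack (), 3 is instead
		 added to the caller's own pack length below.  */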
2181	      size_t nargs = gimple_call_num_args (id->call_stmt);
2182	      tree count, p;
2183	      gimple *new_stmt;
2184
2185	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2186		nargs--;
2187
2188	      if (!gimple_call_lhs (stmt))
2189		{
2190		  /* Drop unused calls.  */
2191		  gsi_remove (&copy_gsi, false);
2192		  continue;
2193		}
2194	      else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2195		{
2196		  count = build_int_cst (integer_type_node, nargs);
2197		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2198		  gsi_replace (&copy_gsi, new_stmt, false);
2199		  stmt = new_stmt;
2200		}
2201	      else if (nargs != 0)
2202		{
2203		  tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2204		  count = build_int_cst (integer_type_node, nargs);
2205		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2206						  PLUS_EXPR, newlhs, count);
2207		  gimple_call_set_lhs (stmt, newlhs);
2208		  gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2209		}
2210	    }
2211	  else if (call_stmt
2212		   && id->call_stmt
2213		   && gimple_call_internal_p (stmt)
2214		   && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2215	    {
2216	      /* Drop TSAN_FUNC_EXIT () internal calls during inlining.  */
2217	      gsi_remove (&copy_gsi, false);
2218	      continue;
2219	    }
2220
	  /* Statements produced by inlining can be unfolded, especially
	     when we constant propagated some operands.  We can't fold
	     them right now for two reasons:
	     1) folding requires SSA_NAME_DEF_STMTs to be correct
	     2) we can't change function calls to builtins.
	     So we just mark the statement for later folding.  We mark
	     all new statements, instead of just the statements that
	     changed by some nontrivial substitution, so that even
	     statements made foldable indirectly are updated.  If this
	     turns out to be expensive, copy_body can be told to watch
	     for nontrivial changes.  */
2232	  if (id->statements_to_fold)
2233	    id->statements_to_fold->add (stmt);
2234
2235	  /* We're duplicating a CALL_EXPR.  Find any corresponding
2236	     callgraph edges and update or duplicate them.  */
2237	  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2238	    {
2239	      struct cgraph_edge *edge;
2240
2241	      switch (id->transform_call_graph_edges)
2242		{
2243		case CB_CGE_DUPLICATE:
2244		  edge = id->src_node->get_edge (orig_stmt);
2245		  if (edge)
2246		    {
2247		      struct cgraph_edge *old_edge = edge;
2248
		      /* A speculative call consists of multiple edges:
			 an indirect edge and one or more direct edges.
			 Duplicate the whole thing and distribute
			 frequencies accordingly.  */
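		      /* A sketch of the scaling done below: each cloned
			 direct edge E gets the count

			     copy_basic_block->count
			       * old E->count / (direct_cnt + indir_cnt)

			 and the cloned indirect edge receives the
			 remaining INDIR_CNT share.  */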
2253		      if (edge->speculative)
2254			{
2255			  int n = 0;
2256			  profile_count direct_cnt
2257				 = profile_count::zero ();
2258
2259			  /* First figure out the distribution of counts
2260			     so we can re-scale BB profile accordingly.  */
2261			  for (cgraph_edge *e = old_edge; e;
2262			       e = e->next_speculative_call_target ())
2263			    direct_cnt = direct_cnt + e->count;
2264
2265			  cgraph_edge *indirect
2266				 = old_edge->speculative_call_indirect_edge ();
2267			  profile_count indir_cnt = indirect->count;
2268
			  /* Next iterate over all direct edges, cloning each
			     one and its corresponding reference and updating
			     the profile.  */
2271			  for (cgraph_edge *e = old_edge;
2272			       e;
2273			       e = e->next_speculative_call_target ())
2274			    {
2275			      profile_count cnt = e->count;
2276
2277			      id->dst_node->clone_reference
2278				 (e->speculative_call_target_ref (), stmt);
2279			      edge = e->clone (id->dst_node, call_stmt,
2280					       gimple_uid (stmt), num, den,
2281					       true);
2282			      profile_probability prob
2283				 = cnt.probability_in (direct_cnt
2284						       + indir_cnt);
2285			      edge->count
2286				 = copy_basic_block->count.apply_probability
2287					 (prob);
2288			      n++;
2289			    }
2290			  gcc_checking_assert
2291				 (indirect->num_speculative_call_targets_p ()
2292				  == n);
2293
			  /* Duplicate the indirect edge after all direct
			     edges have been cloned.  */
2296			  indirect = indirect->clone (id->dst_node, call_stmt,
2297						      gimple_uid (stmt),
2298						      num, den,
2299						      true);
2300
2301			  profile_probability prob
2302			     = indir_cnt.probability_in (direct_cnt
2303							 + indir_cnt);
2304			  indirect->count
2305			     = copy_basic_block->count.apply_probability (prob);
2306			}
2307		      else
2308			{
2309			  edge = edge->clone (id->dst_node, call_stmt,
2310					      gimple_uid (stmt),
2311					      num, den,
2312					      true);
2313			  edge->count = copy_basic_block->count;
2314			}
2315		    }
2316		  break;
2317
2318		case CB_CGE_MOVE_CLONES:
2319		  id->dst_node->set_call_stmt_including_clones (orig_stmt,
2320								call_stmt);
2321		  edge = id->dst_node->get_edge (stmt);
2322		  break;
2323
2324		case CB_CGE_MOVE:
2325		  edge = id->dst_node->get_edge (orig_stmt);
2326		  if (edge)
2327		    edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2328		  break;
2329
2330		default:
2331		  gcc_unreachable ();
2332		}
2333
	      /* Constant propagation on arguments done during inlining
		 may create a new direct call.  Produce an edge for it.  */
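	      /* E.g. an indirect call 'fp_1 (x)' in the callee can have
		 become the direct call 'bar (x)' when inlining
		 substituted '&bar' for the value backing fp_1.  */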
2336	      if ((!edge
2337		   || (edge->indirect_inlining_edge
2338		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2339		  && id->dst_node->definition
2340		  && (fn = gimple_call_fndecl (stmt)) != NULL)
2341		{
2342		  struct cgraph_node *dest = cgraph_node::get_create (fn);
2343
		  /* We have a missing edge in the callgraph.  This can happen
		     when previous inlining turned an indirect call into a
		     direct call by constant propagating arguments or when we
		     are producing a dead clone (for further cloning).  In all
		     other cases we hit a bug (incorrect node sharing is the
		     most common reason for missing edges).  */
2350		  gcc_assert (!dest->definition
2351			      || dest->address_taken
2352		  	      || !id->src_node->definition
2353			      || !id->dst_node->definition);
2354		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2355		    id->dst_node->create_edge_including_clones
2356		      (dest, orig_stmt, call_stmt, bb->count,
2357		       CIF_ORIGINALLY_INDIRECT_CALL);
2358		  else
2359		    id->dst_node->create_edge (dest, call_stmt,
2360					bb->count)->inline_failed
2361		      = CIF_ORIGINALLY_INDIRECT_CALL;
2362		  if (dump_file)
2363		    {
2364		      fprintf (dump_file, "Created new direct edge to %s\n",
2365			       dest->dump_name ());
2366		    }
2367		}
2368
2369	      notice_special_calls (as_a <gcall *> (stmt));
2370	    }
2371
2372	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2373				      id->eh_map, id->eh_lp_nr);
2374
2375	  gsi_next (&copy_gsi);
2376	}
2377      while (!gsi_end_p (copy_gsi));
2378
2379      copy_gsi = gsi_last_bb (copy_basic_block);
2380    }
2381
2382  return copy_basic_block;
2383}
2384
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */
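/* A minimal sketch: when a copied block gains an EH edge E to a landing
   pad that RET_BB already reaches, each PHI argument on E is copied from
   the argument the PHI already carries on the RET_BB edge.  */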
2403static void
2404update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2405				  bool can_throw, bool nonlocal_goto)
2406{
2407  edge e;
2408  edge_iterator ei;
2409
2410  FOR_EACH_EDGE (e, ei, bb->succs)
2411    if (!e->dest->aux
2412	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2413      {
2414	gphi *phi;
2415	gphi_iterator si;
2416
2417	if (!nonlocal_goto)
2418	  gcc_assert (e->flags & EDGE_EH);
2419
2420	if (!can_throw)
2421	  gcc_assert (!(e->flags & EDGE_EH));
2422
2423	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2424	  {
2425	    edge re;
2426
2427	    phi = si.phi ();
2428
2429	    /* For abnormal goto/call edges the receiver can be the
2430	       ENTRY_BLOCK.  Do not assert this cannot happen.  */
2431
2432	    gcc_assert ((e->flags & EDGE_EH)
2433			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2434
2435	    re = find_edge (ret_bb, e->dest);
2436	    gcc_checking_assert (re);
2437	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2438			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2439
2440	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2441		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2442	  }
2443      }
2444}
2445
2446/* Insert clobbers for automatic variables of inlined ID->src_fn
2447   function at the start of basic block ID->eh_landing_pad_dest.  */
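/* E.g. if the inlined body has a local aggregate TMP (a hypothetical
   name) that is still live when an EH edge leaves the inlined region,
   a 'TMP = {CLOBBER};' statement is prepended to the landing pad so
   that later passes know the value is dead on that path.  */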
2448
2449static void
2450add_clobbers_to_eh_landing_pad (copy_body_data *id)
2451{
2452  tree var;
2453  basic_block bb = id->eh_landing_pad_dest;
2454  live_vars_map *vars = NULL;
2455  unsigned int cnt = 0;
2456  unsigned int i;
2457  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2458    if (VAR_P (var)
2459	&& !DECL_HARD_REGISTER (var)
2460	&& !TREE_THIS_VOLATILE (var)
2461	&& !DECL_HAS_VALUE_EXPR_P (var)
2462	&& !is_gimple_reg (var)
2463	&& auto_var_in_fn_p (var, id->src_fn)
2464	&& !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2465      {
2466	tree *t = id->decl_map->get (var);
2467	if (!t)
2468	  continue;
2469	tree new_var = *t;
2470	if (VAR_P (new_var)
2471	    && !DECL_HARD_REGISTER (new_var)
2472	    && !TREE_THIS_VOLATILE (new_var)
2473	    && !DECL_HAS_VALUE_EXPR_P (new_var)
2474	    && !is_gimple_reg (new_var)
2475	    && auto_var_in_fn_p (new_var, id->dst_fn))
2476	  {
2477	    if (vars == NULL)
2478	      vars = new live_vars_map;
2479            vars->put (DECL_UID (var), cnt++);
2480	  }
2481      }
2482  if (vars == NULL)
2483    return;
2484
2485  vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2486  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2487    if (VAR_P (var))
2488      {
2489	edge e;
2490	edge_iterator ei;
2491	bool needed = false;
2492	unsigned int *v = vars->get (DECL_UID (var));
2493	if (v == NULL)
2494	  continue;
2495	FOR_EACH_EDGE (e, ei, bb->preds)
2496	  if ((e->flags & EDGE_EH) != 0
2497	      && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2498	    {
2499	      basic_block src_bb = (basic_block) e->src->aux;
2500
2501	      if (bitmap_bit_p (&live[src_bb->index], *v))
2502		{
2503		  needed = true;
2504		  break;
2505		}
2506	    }
2507	if (needed)
2508	  {
2509	    tree new_var = *id->decl_map->get (var);
2510	    gimple_stmt_iterator gsi = gsi_after_labels (bb);
2511	    tree clobber = build_clobber (TREE_TYPE (new_var));
2512	    gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2513	    gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2514	  }
2515      }
2516  destroy_live_vars (live);
2517  delete vars;
2518}
2519
/* Copy edges from BB into its copy constructed earlier, scaling the
   profile accordingly.  EH edges are taken care of separately below.
   Assume aux pointers point to the copies of each BB.  Return true if any
   debug stmts are left after a statement that must end the basic block.  */
2524
2525static bool
2526copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2527		   basic_block ret_bb, basic_block abnormal_goto_dest,
2528		   copy_body_data *id)
2529{
2530  basic_block new_bb = (basic_block) bb->aux;
2531  edge_iterator ei;
2532  edge old_edge;
2533  gimple_stmt_iterator si;
2534  bool need_debug_cleanup = false;
2535
2536  /* Use the indices from the original blocks to create edges for the
2537     new ones.  */
2538  FOR_EACH_EDGE (old_edge, ei, bb->succs)
2539    if (!(old_edge->flags & EDGE_EH))
2540      {
2541	edge new_edge;
2542	int flags = old_edge->flags;
2543	location_t locus = old_edge->goto_locus;
2544
2545	/* Return edges do get a FALLTHRU flag when they get inlined.  */
2546	if (old_edge->dest->index == EXIT_BLOCK
2547	    && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2548	    && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2549	  flags |= EDGE_FALLTHRU;
2550
2551	new_edge
2552	  = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2553	new_edge->probability = old_edge->probability;
2554	if (!id->reset_location)
2555	  new_edge->goto_locus = remap_location (locus, id);
2556      }
2557
2558  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2559    return false;
2560
  /* When doing function splitting, we must decrease the count of the
     return block which was previously reachable by blocks we did not
     copy.  */
2563  if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2564    FOR_EACH_EDGE (old_edge, ei, bb->preds)
2565      if (old_edge->src->index != ENTRY_BLOCK
2566	  && !old_edge->src->aux)
2567	new_bb->count -= old_edge->count ().apply_scale (num, den);
2568
2569  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2570    {
2571      gimple *copy_stmt;
2572      bool can_throw, nonlocal_goto;
2573
2574      copy_stmt = gsi_stmt (si);
2575      if (!is_gimple_debug (copy_stmt))
2576	update_stmt (copy_stmt);
2577
2578      /* Do this before the possible split_block.  */
2579      gsi_next (&si);
2580
2581      /* If this tree could throw an exception, there are two
2582         cases where we need to add abnormal edge(s): the
2583         tree wasn't in a region and there is a "current
2584         region" in the caller; or the original tree had
2585         EH edges.  In both cases split the block after the tree,
2586         and add abnormal edge(s) as needed; we need both
2587         those from the callee and the caller.
2588         We check whether the copy can throw, because the const
2589         propagation can change an INDIRECT_REF which throws
2590         into a COMPONENT_REF which doesn't.  If the copy
2591         can throw, the original could also throw.  */
2592      can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2593      nonlocal_goto
2594	= (stmt_can_make_abnormal_goto (copy_stmt)
2595	   && !computed_goto_p (copy_stmt));
2596
2597      if (can_throw || nonlocal_goto)
2598	{
2599	  if (!gsi_end_p (si))
2600	    {
2601	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2602		gsi_next (&si);
2603	      if (gsi_end_p (si))
2604		need_debug_cleanup = true;
2605	    }
2606	  if (!gsi_end_p (si))
2607	    /* Note that bb's predecessor edges aren't necessarily
2608	       right at this point; split_block doesn't care.  */
2609	    {
2610	      edge e = split_block (new_bb, copy_stmt);
2611
2612	      new_bb = e->dest;
2613	      new_bb->aux = e->src->aux;
2614	      si = gsi_start_bb (new_bb);
2615	    }
2616	}
2617
2618      bool update_probs = false;
2619
2620      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2621	{
2622	  make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2623	  update_probs = true;
2624	}
2625      else if (can_throw)
2626	{
2627	  make_eh_edges (copy_stmt);
2628	  update_probs = true;
2629	}
2630
2631      /* EH edges may not match old edges.  Copy as much as possible.  */
2632      if (update_probs)
2633	{
2634          edge e;
2635          edge_iterator ei;
2636	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2637
2638          FOR_EACH_EDGE (old_edge, ei, bb->succs)
2639            if ((old_edge->flags & EDGE_EH)
2640		&& (e = find_edge (copy_stmt_bb,
2641				   (basic_block) old_edge->dest->aux))
2642		&& (e->flags & EDGE_EH))
2643	      e->probability = old_edge->probability;
2644
2645          FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2646	    if (e->flags & EDGE_EH)
2647	      {
2648		if (!e->probability.initialized_p ())
2649		  e->probability = profile_probability::never ();
2650		if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2651		  {
2652		    if (id->eh_landing_pad_dest == NULL)
2653		      id->eh_landing_pad_dest = e->dest;
2654		    else
2655		      gcc_assert (id->eh_landing_pad_dest == e->dest);
2656		  }
2657	      }
2658        }
2659
2660
      /* If the call we inline cannot make an abnormal goto, do not add
         additional abnormal edges but only retain those already present
	 in the original function body.  */
2664      if (abnormal_goto_dest == NULL)
2665	nonlocal_goto = false;
2666      if (nonlocal_goto)
2667	{
2668	  basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2669
2670	  if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2671	    nonlocal_goto = false;
2672	  /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2673	     in OpenMP regions which aren't allowed to be left abnormally.
2674	     So, no need to add abnormal edge in that case.  */
2675	  else if (is_gimple_call (copy_stmt)
2676		   && gimple_call_internal_p (copy_stmt)
2677		   && (gimple_call_internal_fn (copy_stmt)
2678		       == IFN_ABNORMAL_DISPATCHER)
2679		   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2680	    nonlocal_goto = false;
2681	  else
2682	    make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2683				   EDGE_ABNORMAL);
2684	}
2685
2686      if ((can_throw || nonlocal_goto)
2687	  && gimple_in_ssa_p (cfun))
2688	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2689					  can_throw, nonlocal_goto);
2690    }
2691  return need_debug_cleanup;
2692}
2693
/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */
2698
2699static void
2700copy_phis_for_bb (basic_block bb, copy_body_data *id)
2701{
2702  basic_block const new_bb = (basic_block) bb->aux;
2703  edge_iterator ei;
2704  gphi *phi;
2705  gphi_iterator si;
2706  edge new_edge;
2707  bool inserted = false;
2708
2709  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2710    {
2711      tree res, new_res;
2712      gphi *new_phi;
2713
2714      phi = si.phi ();
2715      res = PHI_RESULT (phi);
2716      new_res = res;
2717      if (!virtual_operand_p (res)
2718	  && (!id->param_body_adjs
2719	      || !id->param_body_adjs->m_dead_stmts.contains (phi)))
2720	{
2721	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
2722	  if (EDGE_COUNT (new_bb->preds) == 0)
2723	    {
2724	      /* Technically we'd want a SSA_DEFAULT_DEF here... */
2725	      SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2726	    }
2727	  else
2728	    {
2729	      new_phi = create_phi_node (new_res, new_bb);
2730	      FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2731		{
2732		  edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2733					     bb);
2734		  tree arg;
2735		  tree new_arg;
2736		  edge_iterator ei2;
2737		  location_t locus;
2738
		  /* When doing partial cloning, we allow PHIs on the entry
		     block as long as all the arguments are the same.
		     Find any input edge to see which argument to copy.  */
2742		  if (!old_edge)
2743		    FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2744		      if (!old_edge->src->aux)
2745			break;
2746
2747		  arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2748		  new_arg = arg;
2749		  walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2750		  gcc_assert (new_arg);
		  /* With return slot optimization we can end up with
		     non-gimple (foo *)&this->m; fix that here.  */
2753		  if (TREE_CODE (new_arg) != SSA_NAME
2754		      && TREE_CODE (new_arg) != FUNCTION_DECL
2755		      && !is_gimple_val (new_arg))
2756		    {
2757		      gimple_seq stmts = NULL;
2758		      new_arg = force_gimple_operand (new_arg, &stmts, true,
2759						      NULL);
2760		      gsi_insert_seq_on_edge (new_edge, stmts);
2761		      inserted = true;
2762		    }
2763		  locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2764		  if (id->reset_location)
2765		    locus = input_location;
2766		  else
2767		    locus = remap_location (locus, id);
2768		  add_phi_arg (new_phi, new_arg, new_edge, locus);
2769		}
2770	    }
2771	}
2772    }
2773
2774  /* Commit the delayed edge insertions.  */
2775  if (inserted)
2776    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2777      gsi_commit_one_edge_insert (new_edge, NULL);
2778}
2779
2780
2781/* Wrapper for remap_decl so it can be used as a callback.  */
2782
2783static tree
2784remap_decl_1 (tree decl, void *data)
2785{
2786  return remap_decl (decl, (copy_body_data *) data);
2787}
2788
/* Build the struct function and associated data structures for the new
   clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  The
   function changes cfun to the function of NEW_FNDECL (and
   current_function_decl too).  */
2792
2793static void
2794initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2795{
2796  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2797
2798  /* Register specific tree functions.  */
2799  gimple_register_cfg_hooks ();
2800
2801  /* Get clean struct function.  */
2802  push_struct_function (new_fndecl, true);
2803  targetm.target_option.relayout_function (new_fndecl);
2804
2805  /* We will rebuild these, so just sanity check that they are empty.  */
2806  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2807  gcc_assert (cfun->local_decls == NULL);
2808  gcc_assert (cfun->cfg == NULL);
2809  gcc_assert (cfun->decl == new_fndecl);
2810
2811  /* Copy items we preserve during cloning.  */
2812  cfun->static_chain_decl = src_cfun->static_chain_decl;
2813  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2814  cfun->function_end_locus = src_cfun->function_end_locus;
2815  cfun->curr_properties = src_cfun->curr_properties;
2816  cfun->last_verified = src_cfun->last_verified;
2817  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2818  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2819  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2820  cfun->calls_eh_return = src_cfun->calls_eh_return;
2821  cfun->stdarg = src_cfun->stdarg;
2822  cfun->after_inlining = src_cfun->after_inlining;
2823  cfun->can_throw_non_call_exceptions
2824    = src_cfun->can_throw_non_call_exceptions;
2825  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2826  cfun->returns_struct = src_cfun->returns_struct;
2827  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2828
2829  init_empty_tree_cfg ();
2830
2831  profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2832
2833  profile_count num = count;
2834  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2835  profile_count::adjust_for_ipa_scaling (&num, &den);
2836
2837  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2838    ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2839				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2840  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2841    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2842				ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2843  if (src_cfun->eh)
2844    init_eh_for_function ();
2845
2846  if (src_cfun->gimple_df)
2847    {
2848      init_tree_ssa (cfun);
2849      cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2850      if (cfun->gimple_df->in_ssa_p)
2851	init_ssa_operands (cfun);
2852    }
2853}
2854
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If
   a successor has multiple predecessors, reset the debug stmts' values,
   otherwise keep them.  */
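/* E.g. if a copied block ends with a throwing call followed by debug
   binds, the binds are copied to all successors (and moved, rather than
   copied, for the last edge); a bind placed into a block with several
   predecessors has its value reset, since the value would not be correct
   on the other incoming paths.  */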
2859
2860static void
2861maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2862{
2863  edge e;
2864  edge_iterator ei;
2865  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2866
2867  if (gsi_end_p (si)
2868      || gsi_one_before_end_p (si)
2869      || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2870	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2871    return;
2872
2873  FOR_EACH_EDGE (e, ei, new_bb->succs)
2874    {
2875      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2876      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2877      while (is_gimple_debug (gsi_stmt (ssi)))
2878	{
2879	  gimple *stmt = gsi_stmt (ssi);
2880	  gdebug *new_stmt;
2881	  tree var;
2882	  tree value;
2883
2884	  /* For the last edge move the debug stmts instead of copying
2885	     them.  */
2886	  if (ei_one_before_end_p (ei))
2887	    {
2888	      si = ssi;
2889	      gsi_prev (&ssi);
2890	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2891		{
2892		  gimple_debug_bind_reset_value (stmt);
2893		  gimple_set_location (stmt, UNKNOWN_LOCATION);
2894		}
2895	      gsi_remove (&si, false);
2896	      gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
2897	      continue;
2898	    }
2899
2900	  if (gimple_debug_bind_p (stmt))
2901	    {
2902	      var = gimple_debug_bind_get_var (stmt);
2903	      if (single_pred_p (e->dest))
2904		{
2905		  value = gimple_debug_bind_get_value (stmt);
2906		  value = unshare_expr (value);
2907		  new_stmt = gimple_build_debug_bind (var, value, stmt);
2908		}
2909	      else
2910		new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2911	    }
2912	  else if (gimple_debug_source_bind_p (stmt))
2913	    {
2914	      var = gimple_debug_source_bind_get_var (stmt);
2915	      value = gimple_debug_source_bind_get_value (stmt);
2916	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2917	    }
2918	  else if (gimple_debug_nonbind_marker_p (stmt))
2919	    new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2920	  else
2921	    gcc_unreachable ();
2922	  gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
2923	  id->debug_stmts.safe_push (new_stmt);
2924	  gsi_prev (&ssi);
2925	}
2926    }
2927}
2928
2929/* Make a copy of the sub-loops of SRC_PARENT and place them
2930   as siblings of DEST_PARENT.  */
2931
2932static void
2933copy_loops (copy_body_data *id,
2934	    class loop *dest_parent, class loop *src_parent)
2935{
2936  class loop *src_loop = src_parent->inner;
2937  while (src_loop)
2938    {
2939      if (!id->blocks_to_copy
2940	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2941	{
2942	  class loop *dest_loop = alloc_loop ();
2943
2944	  /* Assign the new loop its header and latch and associate
2945	     those with the new loop.  */
2946	  dest_loop->header = (basic_block)src_loop->header->aux;
2947	  dest_loop->header->loop_father = dest_loop;
2948	  if (src_loop->latch != NULL)
2949	    {
2950	      dest_loop->latch = (basic_block)src_loop->latch->aux;
2951	      dest_loop->latch->loop_father = dest_loop;
2952	    }
2953
2954	  /* Copy loop meta-data.  */
2955	  copy_loop_info (src_loop, dest_loop);
2956	  if (dest_loop->unroll)
2957	    cfun->has_unroll = true;
2958	  if (dest_loop->force_vectorize)
2959	    cfun->has_force_vectorize_loops = true;
2960	  if (id->src_cfun->last_clique != 0)
2961	    dest_loop->owned_clique
2962	      = remap_dependence_clique (id,
2963					 src_loop->owned_clique
2964					 ? src_loop->owned_clique : 1);
2965
2966	  /* Finally place it into the loop array and the loop tree.  */
2967	  place_new_loop (cfun, dest_loop);
2968	  flow_loop_tree_node_add (dest_parent, dest_loop);
2969
2970	  if (src_loop->simduid)
2971	    {
2972	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
2973	      cfun->has_simduid_loops = true;
2974	    }
2975
2976	  /* Recurse.  */
2977	  copy_loops (id, dest_loop, src_loop);
2978	}
2979      src_loop = src_loop->next;
2980    }
2981}
2982
2983/* Call redirect_call_stmt_to_callee on all calls in BB.  */
2984
2985void
2986redirect_all_calls (copy_body_data * id, basic_block bb)
2987{
2988  gimple_stmt_iterator si;
2989  gimple *last = last_stmt (bb);
2990  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2991    {
2992      gimple *stmt = gsi_stmt (si);
2993      if (is_gimple_call (stmt))
2994	{
2995	  tree old_lhs = gimple_call_lhs (stmt);
2996	  struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2997	  if (edge)
2998	    {
2999	      gimple *new_stmt
3000		= cgraph_edge::redirect_call_stmt_to_callee (edge);
	      /* If the IPA-SRA transformation, run as part of edge
		 redirection, removed the LHS because it is unused, save it to
		 killed_new_ssa_names so that we can prune it from debug
		 statements.  */
3005	      if (old_lhs
3006		  && TREE_CODE (old_lhs) == SSA_NAME
3007		  && !gimple_call_lhs (new_stmt))
3008		{
3009		  if (!id->killed_new_ssa_names)
3010		    id->killed_new_ssa_names = new hash_set<tree> (16);
3011		  id->killed_new_ssa_names->add (old_lhs);
3012		}
3013
3014	      if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
3015		gimple_purge_dead_eh_edges (bb);
3016	    }
3017	}
3018    }
3019}
3020
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via the CFG and returns the new fndecl.  */
3023
3024static tree
3025copy_cfg_body (copy_body_data * id,
3026	       basic_block entry_block_map, basic_block exit_block_map,
3027	       basic_block new_entry)
3028{
3029  tree callee_fndecl = id->src_fn;
3030  /* Original cfun for the callee, doesn't change.  */
3031  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3032  struct function *cfun_to_copy;
3033  basic_block bb;
3034  tree new_fndecl = NULL;
3035  bool need_debug_cleanup = false;
3036  int last;
3037  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3038  profile_count num = entry_block_map->count;
3039
3040  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3041
3042  /* Register specific tree functions.  */
3043  gimple_register_cfg_hooks ();
3044
  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
     be part of a loop, we must compute the frequency and probability of
     ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
     probabilities of edges incoming from the nonduplicated region.  */
3050  if (new_entry)
3051    {
3052      edge e;
3053      edge_iterator ei;
3054      den = profile_count::zero ();
3055
3056      FOR_EACH_EDGE (e, ei, new_entry->preds)
3057	if (!e->src->aux)
3058	  den += e->count ();
3059      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3060    }
3061
3062  profile_count::adjust_for_ipa_scaling (&num, &den);
3063
3064  /* Must have a CFG here at this point.  */
3065  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3066	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
3067
3068
3069  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3070  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3071  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3072  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3073
3074  /* Duplicate any exception-handling regions.  */
3075  if (cfun->eh)
3076    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3077				       remap_decl_1, id);
3078
  /* Use aux pointers to map the original blocks to their copies.  */
3080  FOR_EACH_BB_FN (bb, cfun_to_copy)
3081    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3082      {
3083	basic_block new_bb = copy_bb (id, bb, num, den);
3084	bb->aux = new_bb;
3085	new_bb->aux = bb;
3086	new_bb->loop_father = entry_block_map->loop_father;
3087      }
3088
3089  last = last_basic_block_for_fn (cfun);
3090
3091  /* Now that we've duplicated the blocks, duplicate their edges.  */
3092  basic_block abnormal_goto_dest = NULL;
3093  if (id->call_stmt
3094      && stmt_can_make_abnormal_goto (id->call_stmt))
3095    {
3096      gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3097
3098      bb = gimple_bb (id->call_stmt);
3099      gsi_next (&gsi);
3100      if (gsi_end_p (gsi))
3101	abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3102    }
3103  FOR_ALL_BB_FN (bb, cfun_to_copy)
3104    if (!id->blocks_to_copy
3105	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3106      need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3107					       abnormal_goto_dest, id);
3108
3109  if (id->eh_landing_pad_dest)
3110    {
3111      add_clobbers_to_eh_landing_pad (id);
3112      id->eh_landing_pad_dest = NULL;
3113    }
3114
3115  if (new_entry)
3116    {
3117      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3118			  EDGE_FALLTHRU);
3119      e->probability = profile_probability::always ();
3120    }
3121
3122  /* Duplicate the loop tree, if available and wanted.  */
3123  if (loops_for_fn (src_cfun) != NULL
3124      && current_loops != NULL)
3125    {
3126      copy_loops (id, entry_block_map->loop_father,
3127		  get_loop (src_cfun, 0));
3128      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
3129      loops_state_set (LOOPS_NEED_FIXUP);
3130    }
3131
3132  /* If the loop tree in the source function needed fixup, mark the
3133     destination loop tree for fixup, too.  */
3134  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3135    loops_state_set (LOOPS_NEED_FIXUP);
3136
3137  if (gimple_in_ssa_p (cfun))
3138    FOR_ALL_BB_FN (bb, cfun_to_copy)
3139      if (!id->blocks_to_copy
3140	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3141	copy_phis_for_bb (bb, id);
3142
3143  FOR_ALL_BB_FN (bb, cfun_to_copy)
3144    if (bb->aux)
3145      {
3146	if (need_debug_cleanup
3147	    && bb->index != ENTRY_BLOCK
3148	    && bb->index != EXIT_BLOCK)
3149	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3150	/* Update call edge destinations.  This cannot be done before loop
3151	   info is updated, because we may split basic blocks.  */
3152	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3153	    && bb->index != ENTRY_BLOCK
3154	    && bb->index != EXIT_BLOCK)
3155	  redirect_all_calls (id, (basic_block)bb->aux);
3156	((basic_block)bb->aux)->aux = NULL;
3157	bb->aux = NULL;
3158      }
3159
  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
3162  for (; last < last_basic_block_for_fn (cfun); last++)
3163    {
3164      if (need_debug_cleanup)
3165	maybe_move_debug_stmts_to_successors (id,
3166					      BASIC_BLOCK_FOR_FN (cfun, last));
3167      BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3168      /* Update call edge destinations.  This cannot be done before loop
3169	 info is updated, because we may split basic blocks.  */
3170      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3171	redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3172    }
3173  entry_block_map->aux = NULL;
3174  exit_block_map->aux = NULL;
3175
3176  if (id->eh_map)
3177    {
3178      delete id->eh_map;
3179      id->eh_map = NULL;
3180    }
3181  if (id->dependence_map)
3182    {
3183      delete id->dependence_map;
3184      id->dependence_map = NULL;
3185    }
3186
3187  return new_fndecl;
3188}
3189
3190/* Copy the debug STMT using ID.  We deal with these statements in a
3191   special way: if any variable in their VALUE expression wasn't
3192   remapped yet, we won't remap it, because that would get decl uids
3193   out of sync, causing codegen differences between -g and -g0.  If
3194   this arises, we drop the VALUE expression altogether.  */
3195
3196static void
3197copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3198{
3199  tree t, *n;
3200  struct walk_stmt_info wi;
3201
3202  if (tree block = gimple_block (stmt))
3203    {
3204      n = id->decl_map->get (block);
3205      gimple_set_block (stmt, n ? *n : id->block);
3206    }
3207
3208  if (gimple_debug_nonbind_marker_p (stmt))
3209    {
3210      if (id->call_stmt && !gimple_block (stmt))
3211	{
3212	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3213	  gsi_remove (&gsi, true);
3214	}
3215      return;
3216    }
3217
3218  /* Remap all the operands in COPY.  */
3219  memset (&wi, 0, sizeof (wi));
3220  wi.info = id;
3221
3222  processing_debug_stmt = 1;
3223
3224  if (gimple_debug_source_bind_p (stmt))
3225    t = gimple_debug_source_bind_get_var (stmt);
3226  else if (gimple_debug_bind_p (stmt))
3227    t = gimple_debug_bind_get_var (stmt);
3228  else
3229    gcc_unreachable ();
3230
3231  if (TREE_CODE (t) == PARM_DECL
3232      && id->debug_map
3233      && (n = id->debug_map->get (t)))
3234    {
3235      gcc_assert (VAR_P (*n));
3236      t = *n;
3237    }
3238  else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3239    /* T is a non-localized variable.  */;
3240  else
3241    walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3242
3243  if (gimple_debug_bind_p (stmt))
3244    {
3245      gimple_debug_bind_set_var (stmt, t);
3246
3247      if (gimple_debug_bind_has_value_p (stmt))
3248	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3249		   remap_gimple_op_r, &wi, NULL);
3250
3251      /* Punt if any decl couldn't be remapped.  */
3252      if (processing_debug_stmt < 0)
3253	gimple_debug_bind_reset_value (stmt);
3254    }
3255  else if (gimple_debug_source_bind_p (stmt))
3256    {
3257      gimple_debug_source_bind_set_var (stmt, t);
3258      /* When inlining and source bind refers to one of the optimized
3259	 away parameters, change the source bind into normal debug bind
3260	 referring to the corresponding DEBUG_EXPR_DECL that should have
3261	 been bound before the call stmt.  */
3262      t = gimple_debug_source_bind_get_value (stmt);
3263      if (t != NULL_TREE
3264	  && TREE_CODE (t) == PARM_DECL
3265	  && id->call_stmt)
3266	{
3267	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3268	  unsigned int i;
3269	  if (debug_args != NULL)
3270	    {
3271	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3272		if ((**debug_args)[i] == DECL_ORIGIN (t)
3273		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3274		  {
3275		    t = (**debug_args)[i + 1];
3276		    stmt->subcode = GIMPLE_DEBUG_BIND;
3277		    gimple_debug_bind_set_value (stmt, t);
3278		    break;
3279		  }
3280	    }
3281	}
3282      if (gimple_debug_source_bind_p (stmt))
3283	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3284		   remap_gimple_op_r, &wi, NULL);
3285    }
3286
3287  processing_debug_stmt = 0;
3288
3289  update_stmt (stmt);
3290}
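
/* For illustration (not part of the original sources): assume we are
   inlining a call to a function whose parameter P was optimized away,
   with a debug argument D#1 bound to its value before the call.  A
   source bind in the callee such as

     # DEBUG p s=> p

   is then rewritten by the code above into a normal debug bind

     # DEBUG p => D#1

   so that var-tracking can still emit a location for P in the inline
   instance.  */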
3291
3292/* Process deferred debug stmts.  In order to give values better odds
3293   of being successfully remapped, we delay the processing of debug
3294   stmts until all other stmts that might require remapping are
3295   processed.  */
3296
3297static void
3298copy_debug_stmts (copy_body_data *id)
3299{
3300  if (!id->debug_stmts.exists ())
3301    return;
3302
3303  for (gdebug *stmt : id->debug_stmts)
3304    copy_debug_stmt (stmt, id);
3305
3306  id->debug_stmts.release ();
3307}
3308
3309/* Make a copy of the body of SRC_FN so that it can be inserted inline in
3310   another function.  */
3311
3312static tree
3313copy_tree_body (copy_body_data *id)
3314{
3315  tree fndecl = id->src_fn;
3316  tree body = DECL_SAVED_TREE (fndecl);
3317
3318  walk_tree (&body, copy_tree_body_r, id, NULL);
3319
3320  return body;
3321}
3322
3323/* Make a copy of the body of FN so that it can be inserted inline in
3324   another function.  */
3325
3326static tree
3327copy_body (copy_body_data *id,
3328	   basic_block entry_block_map, basic_block exit_block_map,
3329	   basic_block new_entry)
3330{
3331  tree fndecl = id->src_fn;
3332  tree body;
3333
3334  /* If this body has a CFG, walk CFG and copy.  */
3335  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3336  body = copy_cfg_body (id, entry_block_map, exit_block_map,
3337			new_entry);
3338  copy_debug_stmts (id);
3339  delete id->killed_new_ssa_names;
3340  id->killed_new_ssa_names = NULL;
3341
3342  return body;
3343}
3344
3345/* Return true if VALUE is an ADDR_EXPR of an automatic variable
3346   defined in function FN, or of a data member thereof.  */
3347
3348static bool
3349self_inlining_addr_expr (tree value, tree fn)
3350{
3351  tree var;
3352
3353  if (TREE_CODE (value) != ADDR_EXPR)
3354    return false;
3355
3356  var = get_base_address (TREE_OPERAND (value, 0));
3357
3358  return var && auto_var_in_fn_p (var, fn);
3359}
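
/* For instance (illustrative): when FN is inlined into itself, an
   argument of the form &local, where LOCAL is an automatic variable
   of FN, must not be forwarded directly, because each inline instance
   needs its own copy; self_inlining_addr_expr returns true for it,
   while the address of a global or of a caller-owned local yields
   false.  */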
3360
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */
3364
3365static gimple *
3366insert_init_debug_bind (copy_body_data *id,
3367			basic_block bb, tree var, tree value,
3368			gimple *base_stmt)
3369{
3370  gimple *note;
3371  gimple_stmt_iterator gsi;
3372  tree tracked_var;
3373
3374  if (!gimple_in_ssa_p (id->src_cfun))
3375    return NULL;
3376
3377  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3378    return NULL;
3379
3380  tracked_var = target_for_debug_bind (var);
3381  if (!tracked_var)
3382    return NULL;
3383
3384  if (bb)
3385    {
3386      gsi = gsi_last_bb (bb);
3387      if (!base_stmt && !gsi_end_p (gsi))
3388	base_stmt = gsi_stmt (gsi);
3389    }
3390
3391  note = gimple_build_debug_bind (tracked_var,
3392				  value == error_mark_node
3393				  ? NULL_TREE : unshare_expr (value),
3394				  base_stmt);
3395
3396  if (bb)
3397    {
3398      if (!gsi_end_p (gsi))
3399	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3400      else
3401	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3402    }
3403
3404  return note;
3405}
3406
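/* Insert INIT_STMT, if not null, at the end of basic block BB, fixing
   up its RHS if necessary, regimplifying its operands, and, where
   enabled, emitting a matching debug bind.  */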
3407static void
3408insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3409{
  /* If the variable being initialized is zero-sized, building the
     assignment may have resulted in no gimple statement at all.  */
3412  if (init_stmt)
3413    {
3414      gimple_stmt_iterator si = gsi_last_bb (bb);
3415
3416      /* We can end up with init statements that store to a non-register
3417         from a rhs with a conversion.  Handle that here by forcing the
3418	 rhs into a temporary.  gimple_regimplify_operands is not
3419	 prepared to do this for us.  */
3420      if (!is_gimple_debug (init_stmt)
3421	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3422	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3423	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3424	{
3425	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3426			     TREE_TYPE (gimple_assign_lhs (init_stmt)),
3427			     gimple_assign_rhs1 (init_stmt));
3428	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3429					  GSI_NEW_STMT);
3430	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3431	  gimple_assign_set_rhs1 (init_stmt, rhs);
3432	}
3433      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3434      if (!is_gimple_debug (init_stmt))
3435	{
3436	  gimple_regimplify_operands (init_stmt, &si);
3437
3438	  tree def = gimple_assign_lhs (init_stmt);
3439	  insert_init_debug_bind (id, bb, def, def, init_stmt);
3440	}
3441    }
3442}
3443
/* Deal with mismatched formal/actual parameters, in a rather brute-force way
   if need be (which should only be necessary for invalid programs).  Attempt
   to convert VALUE to TYPE and return the result if that is possible; just
   return a zero constant of the given type if it fails.  */
3448
3449tree
3450force_value_to_type (tree type, tree value)
3451{
3452  /* If we can match up types by promotion/demotion do so.  */
3453  if (fold_convertible_p (type, value))
3454    return fold_convert (type, value);
3455
3456  /* ???  For valid programs we should not end up here.
3457     Still if we end up with truly mismatched types here, fall back
3458     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3459     GIMPLE to the following passes.  */
3460  if (TREE_CODE (value) == WITH_SIZE_EXPR)
3461    return error_mark_node;
3462  else if (!is_gimple_reg_type (TREE_TYPE (value))
3463	   || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3464    return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3465  else
3466    return build_zero_cst (type);
3467}
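
/* For example (illustrative): passing the int constant 1 for a long
   parameter is handled by fold_convert; truly mismatched aggregate or
   equal-size register types fall back to a VIEW_CONVERT_EXPR; and a
   register-type value whose size differs from TYPE yields a zero
   constant, since nothing sensible can be built for such invalid
   code.  */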
3468
/* Initialize parameter P with VALUE.  If needed, produce an init
   statement at the end of BB.  When BB is NULL, we return the init
   statement to be output later.  */
3472static gimple *
3473setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3474		     basic_block bb, tree *vars)
3475{
3476  gimple *init_stmt = NULL;
3477  tree var;
3478  tree def = (gimple_in_ssa_p (cfun)
3479	      ? ssa_default_def (id->src_cfun, p) : NULL);
3480
3481  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
3482     here since the type of this decl must be visible to the calling
3483     function.  */
3484  var = copy_decl_to_var (p, id);
3485
3486  /* Declare this new variable.  */
3487  DECL_CHAIN (var) = *vars;
3488  *vars = var;
3489
3490  /* Make gimplifier happy about this variable.  */
3491  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3492
  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
3497  if (TREE_READONLY (p)
3498      && !TREE_ADDRESSABLE (p)
3499      && value
3500      && !TREE_SIDE_EFFECTS (value)
3501      && !def)
3502    {
      /* We may produce non-gimple trees by adding NOPs or introduce invalid
	 sharing when the value is not a constant or a DECL.  And we need to
	 make sure that it cannot be modified from another path in the
	 callee.  */
3506      if (((is_gimple_min_invariant (value)
	    /* When the parameter is used in a context that forces it to
	       not be a GIMPLE register, avoid substituting something that
	       is not a decl there.  */
3510	    && ! DECL_NOT_GIMPLE_REG_P (p))
3511	   || (DECL_P (value) && TREE_READONLY (value))
3512	   || (auto_var_in_fn_p (value, id->dst_fn)
3513	       && !TREE_ADDRESSABLE (value)))
3514	  && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
3515	  /* We have to be very careful about ADDR_EXPR.  Make sure
3516	     the base variable isn't a local variable of the inlined
3517	     function, e.g., when doing recursive inlining, direct or
3518	     mutually-recursive or whatever, which is why we don't
3519	     just test whether fn == current_function_decl.  */
3520	  && ! self_inlining_addr_expr (value, fn))
3521	{
3522	  insert_decl_map (id, p, value);
3523	  if (!id->debug_map)
3524	    id->debug_map = new hash_map<tree, tree>;
3525	  id->debug_map->put (p, var);
3526	  return insert_init_debug_bind (id, bb, var, value, NULL);
3527	}
3528    }
3529
3530  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3531     that way, when the PARM_DECL is encountered, it will be
3532     automatically replaced by the VAR_DECL.  */
3533  insert_decl_map (id, p, var);
3534
3535  /* Even if P was TREE_READONLY, the new VAR should not be.  In the original
3536     code, we would have constructed a temporary, and then the function body
3537     would have never changed the value of P.  However, now, we will be
3538     constructing VAR directly.  Therefore, it must not be TREE_READONLY.  */
3539  TREE_READONLY (var) = 0;
3540
3541  tree rhs = value;
3542  if (value
3543      && value != error_mark_node
3544      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3545    rhs = force_value_to_type (TREE_TYPE (p), value);
3546
  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs requiring
     a constant argument to be visible in the inlined function body.  */
3557  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3558      && (optimize
3559          || (TREE_READONLY (p)
3560	      && is_gimple_min_invariant (rhs)))
3561      && (TREE_CODE (rhs) == SSA_NAME
3562	  || is_gimple_min_invariant (rhs))
3563      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3564    {
3565      insert_decl_map (id, def, rhs);
3566      return insert_init_debug_bind (id, bb, var, rhs, NULL);
3567    }
3568
  /* If the value of the argument is never used, don't bother
     initializing it.  */
3571  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3572    {
3573      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3574      return insert_init_debug_bind (id, bb, var, rhs, NULL);
3575    }
3576
3577  /* Initialize this VAR_DECL from the equivalent argument.  Convert
3578     the argument to the proper type in case it was promoted.  */
3579  if (value)
3580    {
3581      if (rhs == error_mark_node)
3582	{
3583	  insert_decl_map (id, p, var);
3584	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
3585	}
3586
3587      STRIP_USELESS_TYPE_CONVERSION (rhs);
3588
      /* If we are in SSA form, properly remap the default definition
	 or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
3592      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3593	{
3594	  if (def)
3595	    {
3596	      def = remap_ssa_name (def, id);
3597	      init_stmt = gimple_build_assign (def, rhs);
3598	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3599	      set_ssa_default_def (cfun, var, NULL);
3600	    }
3601	  else if (!optimize)
3602	    {
3603	      def = make_ssa_name (var);
3604	      init_stmt = gimple_build_assign (def, rhs);
3605	    }
3606	}
3607      else if (!is_empty_type (TREE_TYPE (var)))
3608        init_stmt = gimple_build_assign (var, rhs);
3609
3610      if (bb && init_stmt)
3611        insert_init_stmt (id, bb, init_stmt);
3612    }
3613  return init_stmt;
3614}
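
/* A worked example (illustrative): when inlining

     static int add1 (const int p) { return p + 1; }

   for the call add1 (3), P (or its default SSA definition) is simply
   mapped to the constant 3 and no initialization statement is emitted;
   at most a debug bind "# DEBUG p => 3" is produced.  In the general
   case a VAR_DECL copy of P is created and, unless the default
   definition can be directly replaced by the argument, an init
   statement storing the argument into the copy is appended to BB.  */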
3615
3616/* Generate code to initialize the parameters of the function at the
3617   top of the stack in ID from the GIMPLE_CALL STMT.  */
3618
3619static void
3620initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3621			       tree fn, basic_block bb)
3622{
3623  tree parms;
3624  size_t i;
3625  tree p;
3626  tree vars = NULL_TREE;
3627  tree static_chain = gimple_call_chain (stmt);
3628
3629  /* Figure out what the parameters are.  */
3630  parms = DECL_ARGUMENTS (fn);
3631
3632  /* Loop through the parameter declarations, replacing each with an
3633     equivalent VAR_DECL, appropriately initialized.  */
3634  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3635    {
3636      tree val;
3637      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3638      setup_one_parameter (id, p, val, fn, bb, &vars);
3639    }
3640  /* After remapping parameters remap their types.  This has to be done
3641     in a second loop over all parameters to appropriately remap
3642     variable sized arrays when the size is specified in a
3643     parameter following the array.  */
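  /* For instance (illustrative): given a callee using the GNU parameter
     forward declaration extension,

       void f (int len; int data[len], int len)

     the variably modified type of DATA refers to LEN, which only gets
     its replacement in the loop above; hence DATA's type can be
     remapped only in this second loop.  */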
3644  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3645    {
3646      tree *varp = id->decl_map->get (p);
3647      if (varp && VAR_P (*varp))
3648	{
3649	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3650		      ? ssa_default_def (id->src_cfun, p) : NULL);
3651	  tree var = *varp;
3652	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3653	  /* Also remap the default definition if it was remapped
3654	     to the default definition of the parameter replacement
3655	     by the parameter setup.  */
3656	  if (def)
3657	    {
3658	      tree *defp = id->decl_map->get (def);
3659	      if (defp
3660		  && TREE_CODE (*defp) == SSA_NAME
3661		  && SSA_NAME_VAR (*defp) == var)
3662		TREE_TYPE (*defp) = TREE_TYPE (var);
3663	    }
3664	}
3665    }
3666
3667  /* Initialize the static chain.  */
3668  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3669  gcc_assert (fn != current_function_decl);
3670  if (p)
3671    {
3672      /* No static chain?  Seems like a bug in tree-nested.cc.  */
3673      gcc_assert (static_chain);
3674
3675      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3676    }
3677
3678  declare_inline_vars (id->block, vars);
3679}
3680
3681
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling, and register it in the decl map so that
   references to the RESULT_DECL in the inlined body are replaced by
   it.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */
3693
3694static tree
3695declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3696			 basic_block entry_bb)
3697{
3698  tree callee = id->src_fn;
3699  tree result = DECL_RESULT (callee);
3700  tree callee_type = TREE_TYPE (result);
3701  tree caller_type;
3702  tree var, use;
3703
3704  /* Handle type-mismatches in the function declaration return type
3705     vs. the call expression.  */
3706  if (modify_dest)
3707    caller_type = TREE_TYPE (modify_dest);
3708  else if (return_slot)
3709    caller_type = TREE_TYPE (return_slot);
3710  else /* No LHS on the call.  */
3711    caller_type = TREE_TYPE (TREE_TYPE (callee));
3712
3713  /* We don't need to do anything for functions that don't return anything.  */
3714  if (VOID_TYPE_P (callee_type))
3715    return NULL_TREE;
3716
3717  /* If there was a return slot, then the return value is the
3718     dereferenced address of that object.  */
3719  if (return_slot)
3720    {
3721      /* The front end shouldn't have used both return_slot and
3722	 a modify expression.  */
3723      gcc_assert (!modify_dest);
3724      if (DECL_BY_REFERENCE (result))
3725	{
3726	  tree return_slot_addr = build_fold_addr_expr (return_slot);
3727	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3728
	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check can possibly trigger, because values returned
	     via the return slot optimization are not believed to have their
	     address taken by alias analysis.  */
3735	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3736	  var = return_slot_addr;
3737	  mark_addressable (return_slot);
3738	}
3739      else
3740	{
3741	  var = return_slot;
3742	  gcc_assert (TREE_CODE (var) != SSA_NAME);
3743	  if (TREE_ADDRESSABLE (result))
3744	    mark_addressable (var);
3745	}
3746      if (DECL_NOT_GIMPLE_REG_P (result)
3747	  && DECL_P (var))
3748	DECL_NOT_GIMPLE_REG_P (var) = 1;
3749
3750      if (!useless_type_conversion_p (callee_type, caller_type))
3751	var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3752
3753      use = NULL;
3754      goto done;
3755    }
3756
3757  /* All types requiring non-trivial constructors should have been handled.  */
3758  gcc_assert (!TREE_ADDRESSABLE (callee_type));
3759
3760  /* Attempt to avoid creating a new temporary variable.  */
3761  if (modify_dest
3762      && TREE_CODE (modify_dest) != SSA_NAME)
3763    {
3764      bool use_it = false;
3765
3766      /* We can't use MODIFY_DEST if there's type promotion involved.  */
3767      if (!useless_type_conversion_p (callee_type, caller_type))
3768	use_it = false;
3769
3770      /* ??? If we're assigning to a variable sized type, then we must
3771	 reuse the destination variable, because we've no good way to
3772	 create variable sized temporaries at this point.  */
3773      else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3774	use_it = true;
3775
3776      /* If the callee cannot possibly modify MODIFY_DEST, then we can
3777	 reuse it as the result of the call directly.  Don't do this if
3778	 it would promote MODIFY_DEST to addressable.  */
3779      else if (TREE_ADDRESSABLE (result))
3780	use_it = false;
3781      else
3782	{
3783	  tree base_m = get_base_address (modify_dest);
3784
3785	  /* If the base isn't a decl, then it's a pointer, and we don't
3786	     know where that's going to go.  */
3787	  if (!DECL_P (base_m))
3788	    use_it = false;
3789	  else if (is_global_var (base_m))
3790	    use_it = false;
3791	  else if (DECL_NOT_GIMPLE_REG_P (result)
3792		   && !DECL_NOT_GIMPLE_REG_P (base_m))
3793	    use_it = false;
3794	  else if (!TREE_ADDRESSABLE (base_m))
3795	    use_it = true;
3796	}
3797
3798      if (use_it)
3799	{
3800	  var = modify_dest;
3801	  use = NULL;
3802	  goto done;
3803	}
3804    }
3805
3806  gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3807
3808  var = copy_result_decl_to_var (result, id);
3809  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3810
3811  /* Do not have the rest of GCC warn about this variable as it should
3812     not be visible to the user.  */
3813  suppress_warning (var /* OPT_Wuninitialized? */);
3814
3815  declare_inline_vars (id->block, var);
3816
3817  /* Build the use expr.  If the return type of the function was
3818     promoted, convert it back to the expected type.  */
3819  use = var;
3820  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3821    {
3822      /* If we can match up types by promotion/demotion do so.  */
3823      if (fold_convertible_p (caller_type, var))
3824	use = fold_convert (caller_type, var);
3825      else
3826	{
3827	  /* ???  For valid programs we should not end up here.
3828	     Still if we end up with truly mismatched types here, fall back
3829	     to using a MEM_REF to not leak invalid GIMPLE to the following
3830	     passes.  */
3831	  /* Prevent var from being written into SSA form.  */
3832	  if (is_gimple_reg_type (TREE_TYPE (var)))
3833	    DECL_NOT_GIMPLE_REG_P (var) = true;
3834	  use = fold_build2 (MEM_REF, caller_type,
3835			     build_fold_addr_expr (var),
3836			     build_int_cst (ptr_type_node, 0));
3837	}
3838    }
3839
3840  STRIP_USELESS_TYPE_CONVERSION (use);
3841
3842  if (DECL_BY_REFERENCE (result))
3843    {
3844      TREE_ADDRESSABLE (var) = 1;
3845      var = build_fold_addr_expr (var);
3846    }
3847
3848 done:
3849  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3850     way, when the RESULT_DECL is encountered, it will be
3851     automatically replaced by the VAR_DECL.
3852
3853     When returning by reference, ensure that RESULT_DECL remaps to
3854     gimple_val.  */
3855  if (DECL_BY_REFERENCE (result)
3856      && !is_gimple_val (var))
3857    {
3858      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3859      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 its default_def SSA_NAME.  */
3862      if (gimple_in_ssa_p (id->src_cfun)
3863	  && is_gimple_reg (result))
3864	{
3865	  temp = make_ssa_name (temp);
3866	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3867	}
3868      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3869    }
3870  else
3871    insert_decl_map (id, result, var);
3872
3873  /* Remember this so we can ignore it in remap_decls.  */
3874  id->retvar = var;
3875  return use;
3876}
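
/* For illustration (not part of the original sources): for a caller
   statement

     s = f ();

   where S is a non-addressable local aggregate, MODIFY_DEST is S and
   the code above reuses S itself as the return variable, so the
   inlined body stores its result directly into S and no temporary is
   created.  Under CALL_EXPR_RETURN_SLOT_OPT, RETURN_SLOT plays that
   role instead.  */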
3877
/* Determine if the function can be copied.  If so, return NULL.  If
   not, return a string describing the reason for failure.  */
3880
3881const char *
3882copy_forbidden (struct function *fun)
3883{
3884  const char *reason = fun->cannot_be_copied_reason;
3885
3886  /* Only examine the function once.  */
3887  if (fun->cannot_be_copied_set)
3888    return reason;
3889
3890  /* We cannot copy a function that receives a non-local goto
3891     because we cannot remap the destination label used in the
3892     function that is performing the non-local goto.  */
3893  /* ??? Actually, this should be possible, if we work at it.
3894     No doubt there's just a handful of places that simply
3895     assume it doesn't happen and don't substitute properly.  */
3896  if (fun->has_nonlocal_label)
3897    {
3898      reason = G_("function %q+F can never be copied "
3899		  "because it receives a non-local goto");
3900      goto fail;
3901    }
3902
3903  if (fun->has_forced_label_in_static)
3904    {
3905      reason = G_("function %q+F can never be copied because it saves "
3906		  "address of local label in a static variable");
3907      goto fail;
3908    }
3909
3910 fail:
3911  fun->cannot_be_copied_reason = reason;
3912  fun->cannot_be_copied_set = true;
3913  return reason;
3914}
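
/* For example (illustrative): in

     extern void g (void (*) (void));

     int f (void)
     {
       __label__ out;
       void cb (void) { goto out; }
       g (cb);
      out:
       return 0;
     }

   F receives a non-local goto from the nested function CB, so
   copy_forbidden refuses to copy (and thus to inline or clone) it.  */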
3915
3916
3917static const char *inline_forbidden_reason;
3918
3919/* A callback for walk_gimple_seq to handle statements.  Returns non-null
3920   iff a function cannot be inlined.  Also sets the reason why. */
3921
3922static tree
3923inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3924			 struct walk_stmt_info *wip)
3925{
3926  tree fn = (tree) wip->info;
3927  tree t;
3928  gimple *stmt = gsi_stmt (*gsi);
3929
3930  switch (gimple_code (stmt))
3931    {
3932    case GIMPLE_CALL:
      /* Refuse to inline an alloca call unless the user explicitly forced
	 it, as this may drastically change the program's memory overhead
	 when the function using alloca is called in a loop.  In the GCC
	 present in SPEC2000, inlining into schedule_block caused it to
	 require 2GB of RAM instead of 256MB.  Don't do so for alloca calls
	 emitted for VLA objects, as those can't cause unbounded growth
	 (they're always wrapped inside stack_save/stack_restore regions).  */
3940      if (gimple_maybe_alloca_call_p (stmt)
3941	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3942	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3943	{
3944	  inline_forbidden_reason
3945	    = G_("function %q+F can never be inlined because it uses "
3946		 "alloca (override using the always_inline attribute)");
3947	  *handled_ops_p = true;
3948	  return fn;
3949	}
3950
3951      t = gimple_call_fndecl (stmt);
3952      if (t == NULL_TREE)
3953	break;
3954
3955      /* We cannot inline functions that call setjmp.  */
3956      if (setjmp_call_p (t))
3957	{
3958	  inline_forbidden_reason
3959	    = G_("function %q+F can never be inlined because it uses setjmp");
3960	  *handled_ops_p = true;
3961	  return t;
3962	}
3963
3964      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3965	switch (DECL_FUNCTION_CODE (t))
3966	  {
3967	    /* We cannot inline functions that take a variable number of
3968	       arguments.  */
3969	  case BUILT_IN_VA_START:
3970	  case BUILT_IN_NEXT_ARG:
3971	  case BUILT_IN_VA_END:
3972	    inline_forbidden_reason
3973	      = G_("function %q+F can never be inlined because it "
3974		   "uses variable argument lists");
3975	    *handled_ops_p = true;
3976	    return t;
3977
3978	  case BUILT_IN_LONGJMP:
3979	    /* We can't inline functions that call __builtin_longjmp at
3980	       all.  The non-local goto machinery really requires the
3981	       destination be in a different function.  If we allow the
3982	       function calling __builtin_longjmp to be inlined into the
3983	       function calling __builtin_setjmp, Things will Go Awry.  */
3984	    inline_forbidden_reason
3985	      = G_("function %q+F can never be inlined because "
3986		   "it uses setjmp-longjmp exception handling");
3987	    *handled_ops_p = true;
3988	    return t;
3989
3990	  case BUILT_IN_NONLOCAL_GOTO:
3991	    /* Similarly.  */
3992	    inline_forbidden_reason
3993	      = G_("function %q+F can never be inlined because "
3994		   "it uses non-local goto");
3995	    *handled_ops_p = true;
3996	    return t;
3997
3998	  case BUILT_IN_RETURN:
3999	  case BUILT_IN_APPLY_ARGS:
4000	    /* If a __builtin_apply_args caller would be inlined,
4001	       it would be saving arguments of the function it has
4002	       been inlined into.  Similarly __builtin_return would
4003	       return from the function the inline has been inlined into.  */
4004	    inline_forbidden_reason
4005	      = G_("function %q+F can never be inlined because "
4006		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
4007	    *handled_ops_p = true;
4008	    return t;
4009
4010	  default:
4011	    break;
4012	  }
4013      break;
4014
4015    case GIMPLE_GOTO:
4016      t = gimple_goto_dest (stmt);
4017
4018      /* We will not inline a function which uses computed goto.  The
4019	 addresses of its local labels, which may be tucked into
4020	 global storage, are of course not constant across
4021	 instantiations, which causes unexpected behavior.  */
4022      if (TREE_CODE (t) != LABEL_DECL)
4023	{
4024	  inline_forbidden_reason
4025	    = G_("function %q+F can never be inlined "
4026		 "because it contains a computed goto");
4027	  *handled_ops_p = true;
4028	  return t;
4029	}
4030      break;
4031
4032    default:
4033      break;
4034    }
4035
4036  *handled_ops_p = false;
4037  return NULL_TREE;
4038}
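
/* For example (illustrative): a function calling __builtin_alloca on a
   non-VLA allocation, one calling setjmp, or one performing a computed
   goto such as

     void g (void *p) { goto *p; }

   is rejected by the corresponding case above and will never be
   inlined.  */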
4039
4040/* Return true if FNDECL is a function that cannot be inlined into
4041   another one.  */
4042
4043static bool
4044inline_forbidden_p (tree fndecl)
4045{
4046  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4047  struct walk_stmt_info wi;
4048  basic_block bb;
4049  bool forbidden_p = false;
4050
4051  /* First check for shared reasons not to copy the code.  */
4052  inline_forbidden_reason = copy_forbidden (fun);
4053  if (inline_forbidden_reason != NULL)
4054    return true;
4055
  /* Next, walk the statements of the function looking for
     constructs we can't handle, or that are non-optimal for inlining.  */
4058  hash_set<tree> visited_nodes;
4059  memset (&wi, 0, sizeof (wi));
4060  wi.info = (void *) fndecl;
4061  wi.pset = &visited_nodes;
4062
4063  /* We cannot inline a function with a variable-sized parameter because we
4064     cannot materialize a temporary of such a type in the caller if need be.
4065     Note that the return case is not symmetrical because we can guarantee
4066     that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT.  */
4067  for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4068    if (!poly_int_tree_p (DECL_SIZE (parm)))
4069      {
4070	inline_forbidden_reason
4071	  = G_("function %q+F can never be inlined because "
4072	       "it has a VLA argument");
4073	return true;
4074      }
4075
4076  FOR_EACH_BB_FN (bb, fun)
4077    {
4078      gimple *ret;
4079      gimple_seq seq = bb_seq (bb);
4080      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4081      forbidden_p = (ret != NULL);
4082      if (forbidden_p)
4083	break;
4084    }
4085
4086  return forbidden_p;
4087}
4088
4089/* Return false if the function FNDECL cannot be inlined on account of its
4090   attributes, true otherwise.  */
4091static bool
4092function_attribute_inlinable_p (const_tree fndecl)
4093{
4094  if (targetm.attribute_table)
4095    {
4096      const_tree a;
4097
4098      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4099	{
4100	  const_tree name = get_attribute_name (a);
4101	  int i;
4102
4103	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4104	    if (is_attribute_p (targetm.attribute_table[i].name, name))
4105	      return targetm.function_attribute_inlinable_p (fndecl);
4106	}
4107    }
4108
4109  return true;
4110}
4111
/* Returns true if FN is a function that does not have any
   fundamental inline-blocking properties.  */
4114
4115bool
4116tree_inlinable_function_p (tree fn)
4117{
4118  bool inlinable = true;
4119  bool do_warning;
4120  tree always_inline;
4121
4122  /* If we've already decided this function shouldn't be inlined,
4123     there's no need to check again.  */
4124  if (DECL_UNINLINABLE (fn))
4125    return false;
4126
4127  /* We only warn for functions declared `inline' by the user.  */
4128  do_warning = (opt_for_fn (fn, warn_inline)
4129		&& DECL_DECLARED_INLINE_P (fn)
4130		&& !DECL_NO_INLINE_WARNING_P (fn)
4131		&& !DECL_IN_SYSTEM_HEADER (fn));
4132
4133  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4134
4135  if (flag_no_inline
4136      && always_inline == NULL)
4137    {
4138      if (do_warning)
4139	warning (OPT_Winline, "function %q+F can never be inlined because it "
4140		 "is suppressed using %<-fno-inline%>", fn);
4141      inlinable = false;
4142    }
4143
4144  else if (!function_attribute_inlinable_p (fn))
4145    {
4146      if (do_warning)
4147        warning (OPT_Winline, "function %q+F can never be inlined because it "
4148                 "uses attributes conflicting with inlining", fn);
4149      inlinable = false;
4150    }
4151
4152  else if (inline_forbidden_p (fn))
4153    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.  */
4161      if (always_inline)
4162	error (inline_forbidden_reason, fn);
4163      else if (do_warning)
4164	warning (OPT_Winline, inline_forbidden_reason, fn);
4165
4166      inlinable = false;
4167    }
4168
4169  /* Squirrel away the result so that we don't have to check again.  */
4170  DECL_UNINLINABLE (fn) = !inlinable;
4171
4172  return inlinable;
4173}
4174
/* Estimate the cost of a memory move of type TYPE.  Use the machine
   dependent word size, take a possible memcpy call into account, and
   return the cost based on whether we optimize for size or speed
   according to SPEED_P.  */
4178
4179int
4180estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4181{
4182  HOST_WIDE_INT size;
4183
4184  gcc_assert (!VOID_TYPE_P (type));
4185
4186  if (TREE_CODE (type) == VECTOR_TYPE)
4187    {
4188      scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4189      machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4190      int orig_mode_size
4191	= estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4192      int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4193      return ((orig_mode_size + simd_mode_size - 1)
4194	      / simd_mode_size);
4195    }
4196
4197  size = int_size_in_bytes (type);
4198
4199  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4200    /* Cost of a memcpy call, 3 arguments and the call.  */
4201    return 4;
4202  else
4203    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4204}
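
/* A worked example, assuming a target where MOVE_MAX_PIECES is 8 and
   MOVE_RATIO (speed_p) is 4 (both are target dependent): a 24-byte
   struct costs (24 + 8 - 1) / 8 = 3, while a 40-byte struct exceeds
   the 32-byte piecewise-move limit and is costed as a memcpy call,
   i.e. 4.  */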
4205
/* Returns the cost of operation CODE, according to WEIGHTS.  */
4207
4208static int
4209estimate_operator_cost (enum tree_code code, eni_weights *weights,
4210			tree op1 ATTRIBUTE_UNUSED, tree op2)
4211{
4212  switch (code)
4213    {
4214    /* These are "free" conversions, or their presumed cost
4215       is folded into other operations.  */
4216    case RANGE_EXPR:
4217    CASE_CONVERT:
4218    case COMPLEX_EXPR:
4219    case PAREN_EXPR:
4220    case VIEW_CONVERT_EXPR:
4221      return 0;
4222
    /* Assign a cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
4225    case COND_EXPR:
4226    case VEC_COND_EXPR:
4227    case VEC_PERM_EXPR:
4228
4229    case PLUS_EXPR:
4230    case POINTER_PLUS_EXPR:
4231    case POINTER_DIFF_EXPR:
4232    case MINUS_EXPR:
4233    case MULT_EXPR:
4234    case MULT_HIGHPART_EXPR:
4235
4236    case ADDR_SPACE_CONVERT_EXPR:
4237    case FIXED_CONVERT_EXPR:
4238    case FIX_TRUNC_EXPR:
4239
4240    case NEGATE_EXPR:
4241    case FLOAT_EXPR:
4242    case MIN_EXPR:
4243    case MAX_EXPR:
4244    case ABS_EXPR:
4245    case ABSU_EXPR:
4246
4247    case LSHIFT_EXPR:
4248    case RSHIFT_EXPR:
4249    case LROTATE_EXPR:
4250    case RROTATE_EXPR:
4251
4252    case BIT_IOR_EXPR:
4253    case BIT_XOR_EXPR:
4254    case BIT_AND_EXPR:
4255    case BIT_NOT_EXPR:
4256
4257    case TRUTH_ANDIF_EXPR:
4258    case TRUTH_ORIF_EXPR:
4259    case TRUTH_AND_EXPR:
4260    case TRUTH_OR_EXPR:
4261    case TRUTH_XOR_EXPR:
4262    case TRUTH_NOT_EXPR:
4263
4264    case LT_EXPR:
4265    case LE_EXPR:
4266    case GT_EXPR:
4267    case GE_EXPR:
4268    case EQ_EXPR:
4269    case NE_EXPR:
4270    case ORDERED_EXPR:
4271    case UNORDERED_EXPR:
4272
4273    case UNLT_EXPR:
4274    case UNLE_EXPR:
4275    case UNGT_EXPR:
4276    case UNGE_EXPR:
4277    case UNEQ_EXPR:
4278    case LTGT_EXPR:
4279
4280    case CONJ_EXPR:
4281
4282    case PREDECREMENT_EXPR:
4283    case PREINCREMENT_EXPR:
4284    case POSTDECREMENT_EXPR:
4285    case POSTINCREMENT_EXPR:
4286
4287    case REALIGN_LOAD_EXPR:
4288
4289    case WIDEN_PLUS_EXPR:
4290    case WIDEN_MINUS_EXPR:
4291    case WIDEN_SUM_EXPR:
4292    case WIDEN_MULT_EXPR:
4293    case DOT_PROD_EXPR:
4294    case SAD_EXPR:
4295    case WIDEN_MULT_PLUS_EXPR:
4296    case WIDEN_MULT_MINUS_EXPR:
4297    case WIDEN_LSHIFT_EXPR:
4298
4299    case VEC_WIDEN_PLUS_HI_EXPR:
4300    case VEC_WIDEN_PLUS_LO_EXPR:
4301    case VEC_WIDEN_MINUS_HI_EXPR:
4302    case VEC_WIDEN_MINUS_LO_EXPR:
4303    case VEC_WIDEN_MULT_HI_EXPR:
4304    case VEC_WIDEN_MULT_LO_EXPR:
4305    case VEC_WIDEN_MULT_EVEN_EXPR:
4306    case VEC_WIDEN_MULT_ODD_EXPR:
4307    case VEC_UNPACK_HI_EXPR:
4308    case VEC_UNPACK_LO_EXPR:
4309    case VEC_UNPACK_FLOAT_HI_EXPR:
4310    case VEC_UNPACK_FLOAT_LO_EXPR:
4311    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4312    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4313    case VEC_PACK_TRUNC_EXPR:
4314    case VEC_PACK_SAT_EXPR:
4315    case VEC_PACK_FIX_TRUNC_EXPR:
4316    case VEC_PACK_FLOAT_EXPR:
4317    case VEC_WIDEN_LSHIFT_HI_EXPR:
4318    case VEC_WIDEN_LSHIFT_LO_EXPR:
4319    case VEC_DUPLICATE_EXPR:
4320    case VEC_SERIES_EXPR:
4321
4322      return 1;
4323
    /* A few special cases of expensive operations.  This is useful
       to avoid inlining functions that contain too many of these.  */
4326    case TRUNC_DIV_EXPR:
4327    case CEIL_DIV_EXPR:
4328    case FLOOR_DIV_EXPR:
4329    case ROUND_DIV_EXPR:
4330    case EXACT_DIV_EXPR:
4331    case TRUNC_MOD_EXPR:
4332    case CEIL_MOD_EXPR:
4333    case FLOOR_MOD_EXPR:
4334    case ROUND_MOD_EXPR:
4335    case RDIV_EXPR:
4336      if (TREE_CODE (op2) != INTEGER_CST)
4337        return weights->div_mod_cost;
4338      return 1;
4339
4340    /* Bit-field insertion needs several shift and mask operations.  */
4341    case BIT_INSERT_EXPR:
4342      return 3;
4343
4344    default:
4345      /* We expect a copy assignment with no operator.  */
4346      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4347      return 0;
4348    }
4349}
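
/* For instance (illustrative): "a + b" and "a << 2" cost 1, a
   conversion such as "(long) a" costs 0, "a / 4" costs 1 because the
   divisor is an INTEGER_CST, while "a / b" costs
   WEIGHTS->div_mod_cost.  */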
4350
4351
4352/* Estimate number of instructions that will be created by expanding
4353   the statements in the statement sequence STMTS.
4354   WEIGHTS contains weights attributed to various constructs.  */
4355
4356int
4357estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4358{
4359  int cost;
4360  gimple_stmt_iterator gsi;
4361
4362  cost = 0;
4363  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4364    cost += estimate_num_insns (gsi_stmt (gsi), weights);
4365
4366  return cost;
4367}
4368
4369
4370/* Estimate number of instructions that will be created by expanding STMT.
4371   WEIGHTS contains weights attributed to various constructs.  */
4372
4373int
4374estimate_num_insns (gimple *stmt, eni_weights *weights)
4375{
4376  unsigned cost, i;
4377  enum gimple_code code = gimple_code (stmt);
4378  tree lhs;
4379  tree rhs;
4380
4381  switch (code)
4382    {
4383    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;

	 Let us look at both cases, assuming we have "a = b + C":
4392	 <GIMPLE_ASSIGN <var_decl "a">
4393	        <plus_expr <var_decl "b"> <constant C>>
4394	 If "a" is a GIMPLE register, the assignment to it is free on almost
4395	 any target, because "a" usually ends up in a real register.  Hence
4396	 the only cost of this expression comes from the PLUS_EXPR, and we
4397	 can ignore the GIMPLE_ASSIGN.
4398	 If "a" is not a GIMPLE register, the assignment to "a" will most
4399	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4400	 of moving something into "a", which we compute using the function
4401	 estimate_move_cost.  */
4402      if (gimple_clobber_p (stmt))
4403	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */
4404
4405      lhs = gimple_assign_lhs (stmt);
4406      rhs = gimple_assign_rhs1 (stmt);
4407
4408      cost = 0;
4409
4410      /* Account for the cost of moving to / from memory.  */
4411      if (gimple_store_p (stmt))
4412	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4413      if (gimple_assign_load_p (stmt))
4414	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4415
4416      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4417      				      gimple_assign_rhs1 (stmt),
4418				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4419				      == GIMPLE_BINARY_RHS
4420				      ? gimple_assign_rhs2 (stmt) : NULL);
4421      break;
4422
4423    case GIMPLE_COND:
4424      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4425      				         gimple_op (stmt, 0),
4426				         gimple_op (stmt, 1));
4427      break;
4428
4429    case GIMPLE_SWITCH:
4430      {
4431	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	/* Take into account the cost of the switch + guess 2 conditional
	   jumps for each case label.

	   TODO: once the switch expansion logic is sufficiently separated,
	   we can do a better job of estimating the cost of the switch.  */
4437	if (weights->time_based)
4438	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4439	else
4440	  cost = gimple_switch_num_labels (switch_stmt) * 2;
4441      }
4442      break;
4443
4444    case GIMPLE_CALL:
4445      {
4446	tree decl;
4447
4448	if (gimple_call_internal_p (stmt))
4449	  return 0;
4450	else if ((decl = gimple_call_fndecl (stmt))
4451		 && fndecl_built_in_p (decl))
4452	  {
	    /* Do not special case builtins where we see the body.
	       That would just confuse the inliner.  */
4455	    struct cgraph_node *node;
4456	    if ((node = cgraph_node::get (decl))
4457		&& node->definition)
4458	      ;
	    /* For builtins that are likely expanded to nothing or
	       inlined, do not account operand costs.  */
4461	    else if (is_simple_builtin (decl))
4462	      return 0;
4463	    else if (is_inexpensive_builtin (decl))
4464	      return weights->target_builtin_call_cost;
4465	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4466	      {
4467		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4468		   specialize the cheap expansion we do here.
4469		   ???  This asks for a more general solution.  */
4470		switch (DECL_FUNCTION_CODE (decl))
4471		  {
4472		    case BUILT_IN_POW:
4473		    case BUILT_IN_POWF:
4474		    case BUILT_IN_POWL:
4475		      if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4476			  && (real_equal
4477			      (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4478			       &dconst2)))
4479			return estimate_operator_cost
4480			    (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4481			     gimple_call_arg (stmt, 0));
4482		      break;
4483
4484		    default:
4485		      break;
4486		  }
4487	      }
4488	  }
4489
4490	cost = decl ? weights->call_cost : weights->indirect_call_cost;
4491	if (gimple_call_lhs (stmt))
4492	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4493				      weights->time_based);
4494	for (i = 0; i < gimple_call_num_args (stmt); i++)
4495	  {
4496	    tree arg = gimple_call_arg (stmt, i);
4497	    cost += estimate_move_cost (TREE_TYPE (arg),
4498					weights->time_based);
4499	  }
4500	break;
4501      }
4502
4503    case GIMPLE_RETURN:
4504      return weights->return_cost;
4505
4506    case GIMPLE_GOTO:
4507    case GIMPLE_LABEL:
4508    case GIMPLE_NOP:
4509    case GIMPLE_PHI:
4510    case GIMPLE_PREDICT:
4511    case GIMPLE_DEBUG:
4512      return 0;
4513
4514    case GIMPLE_ASM:
4515      {
4516	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4517	/* 1000 means infinity. This avoids overflows later
4518	   with very long asm statements.  */
4519	if (count > 1000)
4520	  count = 1000;
4521	/* If this asm is asm inline, count anything as minimum size.  */
4522	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4523	  count = MIN (1, count);
4524	return MAX (1, count);
4525      }
4526
4527    case GIMPLE_RESX:
4528      /* This is either going to be an external function call with one
4529	 argument, or two register copy statements plus a goto.  */
4530      return 2;
4531
4532    case GIMPLE_EH_DISPATCH:
4533      /* ??? This is going to turn into a switch statement.  Ideally
4534	 we'd have a look at the eh region and estimate the number of
4535	 edges involved.  */
4536      return 10;
4537
4538    case GIMPLE_BIND:
4539      return estimate_num_insns_seq (
4540	       gimple_bind_body (as_a <gbind *> (stmt)),
4541	       weights);
4542
4543    case GIMPLE_EH_FILTER:
4544      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4545
4546    case GIMPLE_CATCH:
4547      return estimate_num_insns_seq (gimple_catch_handler (
4548				       as_a <gcatch *> (stmt)),
4549				     weights);
4550
4551    case GIMPLE_TRY:
4552      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4553              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4554
4555    /* OMP directives are generally very expensive.  */
4556
4557    case GIMPLE_OMP_RETURN:
4558    case GIMPLE_OMP_SECTIONS_SWITCH:
4559    case GIMPLE_OMP_ATOMIC_STORE:
4560    case GIMPLE_OMP_CONTINUE:
4561      /* ...except these, which are cheap.  */
4562      return 0;
4563
4564    case GIMPLE_OMP_ATOMIC_LOAD:
4565      return weights->omp_cost;
4566
4567    case GIMPLE_OMP_FOR:
4568      return (weights->omp_cost
4569              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4570              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4571
4572    case GIMPLE_OMP_PARALLEL:
4573    case GIMPLE_OMP_TASK:
4574    case GIMPLE_OMP_CRITICAL:
4575    case GIMPLE_OMP_MASTER:
4576    case GIMPLE_OMP_MASKED:
4577    case GIMPLE_OMP_SCOPE:
4578    case GIMPLE_OMP_TASKGROUP:
4579    case GIMPLE_OMP_ORDERED:
4580    case GIMPLE_OMP_SCAN:
4581    case GIMPLE_OMP_SECTION:
4582    case GIMPLE_OMP_SECTIONS:
4583    case GIMPLE_OMP_SINGLE:
4584    case GIMPLE_OMP_TARGET:
4585    case GIMPLE_OMP_TEAMS:
4586      return (weights->omp_cost
4587              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4588
4589    case GIMPLE_TRANSACTION:
4590      return (weights->tm_cost
4591	      + estimate_num_insns_seq (gimple_transaction_body (
4592					  as_a <gtransaction *> (stmt)),
4593					weights));
4594
4595    default:
4596      gcc_unreachable ();
4597    }
4598
4599  return cost;
4600}
4601
4602/* Estimate number of instructions that will be created by expanding
4603   function FNDECL.  WEIGHTS contains weights attributed to various
4604   constructs.  */
4605
4606int
4607estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4608{
4609  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4610  gimple_stmt_iterator bsi;
4611  basic_block bb;
4612  int n = 0;
4613
4614  gcc_assert (my_function && my_function->cfg);
4615  FOR_EACH_BB_FN (bb, my_function)
4616    {
4617      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4618	n += estimate_num_insns (gsi_stmt (bsi), weights);
4619    }
4620
4621  return n;
4622}
4623
4624
4625/* Initializes weights used by estimate_num_insns.  */
4626
4627void
4628init_inline_once (void)
4629{
4630  eni_size_weights.call_cost = 1;
4631  eni_size_weights.indirect_call_cost = 3;
4632  eni_size_weights.target_builtin_call_cost = 1;
4633  eni_size_weights.div_mod_cost = 1;
4634  eni_size_weights.omp_cost = 40;
4635  eni_size_weights.tm_cost = 10;
4636  eni_size_weights.time_based = false;
4637  eni_size_weights.return_cost = 1;
4638
4639  /* Estimating time for call is difficult, since we have no idea what the
4640     called function does.  In the current uses of eni_time_weights,
4641     underestimating the cost does less harm than overestimating it, so
4642     we choose a rather small value here.  */
4643  eni_time_weights.call_cost = 10;
4644  eni_time_weights.indirect_call_cost = 15;
4645  eni_time_weights.target_builtin_call_cost = 1;
4646  eni_time_weights.div_mod_cost = 10;
4647  eni_time_weights.omp_cost = 40;
4648  eni_time_weights.tm_cost = 40;
4649  eni_time_weights.time_based = true;
4650  eni_time_weights.return_cost = 2;
4651}
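
/* For illustration (derived from the weights above): a direct call
   counts 1 towards the size estimate but 10 towards the time estimate,
   a division by a non-constant counts 1 vs. 10, and a return counts
   1 vs. 2, so the two estimates of the same body can differ
   considerably.  */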
4652
4653
/* Install the new lexical TREE_BLOCK NEW_BLOCK underneath CURRENT_BLOCK.  */
4655
4656static void
4657prepend_lexical_block (tree current_block, tree new_block)
4658{
4659  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4660  BLOCK_SUBBLOCKS (current_block) = new_block;
4661  BLOCK_SUPERCONTEXT (new_block) = current_block;
4662}
4663
4664/* Add local variables from CALLEE to CALLER.  */
4665
4666static inline void
4667add_local_variables (struct function *callee, struct function *caller,
4668		     copy_body_data *id)
4669{
4670  tree var;
4671  unsigned ix;
4672
4673  FOR_EACH_LOCAL_DECL (callee, ix, var)
4674    if (!can_be_nonlocal (var, id))
4675      {
4676        tree new_var = remap_decl (var, id);
4677
4678        /* Remap debug-expressions.  */
4679	if (VAR_P (new_var)
4680	    && DECL_HAS_DEBUG_EXPR_P (var)
4681	    && new_var != var)
4682	  {
4683	    tree tem = DECL_DEBUG_EXPR (var);
4684	    bool old_regimplify = id->regimplify;
4685	    id->remapping_type_depth++;
4686	    walk_tree (&tem, copy_tree_body_r, id, NULL);
4687	    id->remapping_type_depth--;
4688	    id->regimplify = old_regimplify;
4689	    SET_DECL_DEBUG_EXPR (new_var, tem);
4690	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4691	  }
4692	add_local_decl (caller, new_var);
4693      }
4694}
4695
4696/* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4697   have brought in or introduced any debug stmts for SRCVAR.  */
4698
4699static inline void
4700reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4701{
4702  tree *remappedvarp = id->decl_map->get (srcvar);
4703
4704  if (!remappedvarp)
4705    return;
4706
4707  if (!VAR_P (*remappedvarp))
4708    return;
4709
4710  if (*remappedvarp == id->retvar)
4711    return;
4712
4713  tree tvar = target_for_debug_bind (*remappedvarp);
4714  if (!tvar)
4715    return;
4716
4717  gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4718					  id->call_stmt);
4719  gimple_seq_add_stmt (bindings, stmt);
4720}
4721
4722/* For each inlined variable for which we may have debug bind stmts,
4723   add before GSI a final debug stmt resetting it, marking the end of
4724   its life, so that var-tracking knows it doesn't have to compute
4725   further locations for it.  */
4726
4727static inline void
4728reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4729{
4730  tree var;
4731  unsigned ix;
4732  gimple_seq bindings = NULL;
4733
4734  if (!gimple_in_ssa_p (id->src_cfun))
4735    return;
4736
4737  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4738    return;
4739
4740  for (var = DECL_ARGUMENTS (id->src_fn);
4741       var; var = DECL_CHAIN (var))
4742    reset_debug_binding (id, var, &bindings);
4743
4744  FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4745    reset_debug_binding (id, var, &bindings);
4746
4747  gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4748}
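
/* For illustration (not part of the original sources): if the inlined
   callee had a local variable X with debug binds, a final

     # DEBUG x => NULL

   is emitted at the return point, so that var-tracking stops computing
   locations for X past the end of the inline instance.  */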
4749
4750/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
4751
4752static bool
4753expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4754		    bitmap to_purge)
4755{
4756  tree use_retvar;
4757  tree fn;
4758  hash_map<tree, tree> *dst;
4759  hash_map<tree, tree> *st = NULL;
4760  tree return_slot;
4761  tree modify_dest;
4762  struct cgraph_edge *cg_edge;
4763  cgraph_inline_failed_t reason;
4764  basic_block return_block;
4765  edge e;
4766  gimple_stmt_iterator gsi, stmt_gsi;
4767  bool successfully_inlined = false;
4768  bool purge_dead_abnormal_edges;
4769  gcall *call_stmt;
4770  unsigned int prop_mask, src_properties;
4771  struct function *dst_cfun;
4772  tree simduid;
4773  use_operand_p use;
4774  gimple *simtenter_stmt = NULL;
4775  vec<tree> *simtvars_save;
4776
4777  /* The gimplifier uses input_location in too many places, such as
4778     internal_get_tmp_var ().  */
4779  location_t saved_location = input_location;
4780  input_location = gimple_location (stmt);
4781
  /* From here on, we're only interested in GIMPLE_CALLs.  */
4783  call_stmt = dyn_cast <gcall *> (stmt);
4784  if (!call_stmt)
4785    goto egress;
4786
4787  cg_edge = id->dst_node->get_edge (stmt);
4788  gcc_checking_assert (cg_edge);
4789  /* First, see if we can figure out what function is being called.
4790     If we cannot, then there is no hope of inlining the function.  */
4791  if (cg_edge->indirect_unknown_callee)
4792    goto egress;
4793  fn = cg_edge->callee->decl;
4794  gcc_checking_assert (fn);
4795
4796  /* If FN is a declaration of a function in a nested scope that was
4797     globally declared inline, we don't set its DECL_INITIAL.
4798     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4799     C++ front-end uses it for cdtors to refer to their internal
4800     declarations, that are not real functions.  Fortunately those
4801     don't have trees to be saved, so we can tell by checking their
4802     gimple_body.  */
4803  if (!DECL_INITIAL (fn)
4804      && DECL_ABSTRACT_ORIGIN (fn)
4805      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4806    fn = DECL_ABSTRACT_ORIGIN (fn);
4807
4808  /* Don't try to inline functions that are not well-suited to inlining.  */
4809  if (cg_edge->inline_failed)
4810    {
4811      reason = cg_edge->inline_failed;
4812      /* If this call was originally indirect, we do not want to emit any
4813	 inlining related warnings or sorry messages because there are no
4814	 guarantees regarding those.  */
4815      if (cg_edge->indirect_inlining_edge)
4816	goto egress;
4817
4818      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* For extern inline functions that get redefined we have always
	     silently ignored the always_inline flag.  Better behavior
	     would be to keep both bodies and use the extern inline body
	     for inlining, but we can't do that because frontends overwrite
	     the body.  */
4824	  && !cg_edge->callee->redefined_extern_inline
4825	  /* During early inline pass, report only when optimization is
4826	     not turned on.  */
4827	  && (symtab->global_info_ready
4828	      || !optimize
4829	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4830	  /* PR 20090218-1_0.c. Body can be provided by another module. */
4831	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4832	{
4833	  error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4834		 cgraph_inline_failed_string (reason));
4835	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
4836	    inform (gimple_location (stmt), "called from here");
4837	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4838	    inform (DECL_SOURCE_LOCATION (cfun->decl),
4839                   "called from this function");
4840	}
4841      else if (opt_for_fn (fn, warn_inline)
4842	       && DECL_DECLARED_INLINE_P (fn)
4843	       && !DECL_NO_INLINE_WARNING_P (fn)
4844	       && !DECL_IN_SYSTEM_HEADER (fn)
4845	       && reason != CIF_UNSPECIFIED
4846	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4847	       /* Do not warn about not inlined recursive calls.  */
4848	       && !cg_edge->recursive_p ()
4849	       /* Avoid warnings during early inline pass. */
4850	       && symtab->global_info_ready)
4851	{
4852	  auto_diagnostic_group d;
4853	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4854		       fn, _(cgraph_inline_failed_string (reason))))
4855	    {
4856	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
4857		inform (gimple_location (stmt), "called from here");
4858	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4859		inform (DECL_SOURCE_LOCATION (cfun->decl),
4860                       "called from this function");
4861	    }
4862	}
4863      goto egress;
4864    }
4865  id->src_node = cg_edge->callee;
4866
  /* If the callee is a thunk, all we need to do is adjust the THIS
     pointer and redirect to the function being thunked.  */
4869  if (id->src_node->thunk)
4870    {
4871      cgraph_edge *edge;
4872      tree virtual_offset = NULL;
4873      profile_count count = cg_edge->count;
4874      tree op;
4875      gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4876      thunk_info *info = thunk_info::get (id->src_node);
4877
4878      cgraph_edge::remove (cg_edge);
      edge = id->src_node->callees->clone (id->dst_node, call_stmt,
					   gimple_uid (stmt),
					   profile_count::one (),
					   profile_count::one (),
					   true);
4884      edge->count = count;
4885      if (info->virtual_offset_p)
4886	virtual_offset = size_int (info->virtual_value);
4887      op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4888			      NULL);
4889      gsi_insert_before (&iter, gimple_build_assign (op,
4890						    gimple_call_arg (stmt, 0)),
4891			 GSI_NEW_STMT);
4892      gcc_assert (info->this_adjusting);
4893      op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4894			 virtual_offset, info->indirect_offset);
4895
4896      gimple_call_set_arg (stmt, 0, op);
4897      gimple_call_set_fndecl (stmt, edge->callee->decl);
4898      update_stmt (stmt);
4899      id->src_node->remove ();
4900      successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4901      maybe_remove_unused_call_args (cfun, stmt);
4902      /* This used to return true even though we do fail to inline in
4903	 some cases.  See PR98525.  */
4904      goto egress;
4905    }
4906  fn = cg_edge->callee->decl;
4907  cg_edge->callee->get_untransformed_body ();
4908
4909  if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4910    cg_edge->callee->verify ();
4911
4912  /* We will be inlining this callee.  */
4913  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4914
  /* Update the caller's EH personality.  */
4916  if (DECL_FUNCTION_PERSONALITY (fn))
4917    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4918      = DECL_FUNCTION_PERSONALITY (fn);
4919
4920  /* Split the block before the GIMPLE_CALL.  */
4921  stmt_gsi = gsi_for_stmt (stmt);
4922  gsi_prev (&stmt_gsi);
4923  e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4924  bb = e->src;
4925  return_block = e->dest;
4926  remove_edge (e);
4927
4928  /* If the GIMPLE_CALL was in the last statement of BB, it may have
4929     been the source of abnormal edges.  In this case, schedule
4930     the removal of dead abnormal edges.  */
4931  gsi = gsi_start_bb (return_block);
4932  gsi_next (&gsi);
4933  purge_dead_abnormal_edges = gsi_end_p (gsi);
4934
4935  stmt_gsi = gsi_start_bb (return_block);
4936
4937  /* Build a block containing code to initialize the arguments, the
4938     actual inline expansion of the body, and a label for the return
4939     statements within the function to jump to.  The type of the
4940     statement expression is the return type of the function call.
4941     ???  If the call does not have an associated block then we will
4942     remap all callee blocks to NULL, effectively dropping most of
4943     its debug information.  This should only happen for calls to
4944     artificial decls inserted by the compiler itself.  We need to
4945     either link the inlined blocks into the caller block tree or
4946     not refer to them in any way to not break GC for locations.  */
4947  if (tree block = gimple_block (stmt))
4948    {
      /* We do want to assign a BLOCK_SOURCE_LOCATION other than
         UNKNOWN_LOCATION so that inlined_function_outer_scope_p returns
         true on this BLOCK.  */
4951      location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4952      if (loc == UNKNOWN_LOCATION)
4953	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4954      if (loc == UNKNOWN_LOCATION)
4955	loc = BUILTINS_LOCATION;
4956      id->block = make_node (BLOCK);
4957      BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4958      BLOCK_SOURCE_LOCATION (id->block) = loc;
4959      prepend_lexical_block (block, id->block);
4960    }
4961
4962  /* Local declarations will be replaced by their equivalents in this map.  */
4963  st = id->decl_map;
4964  id->decl_map = new hash_map<tree, tree>;
4965  dst = id->debug_map;
4966  id->debug_map = NULL;
4967  if (flag_stack_reuse != SR_NONE)
4968    id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4969
4970  /* Record the function we are about to inline.  */
4971  id->src_fn = fn;
4972  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4973  id->reset_location = DECL_IGNORED_P (fn);
4974  id->call_stmt = call_stmt;
4975
4976  /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4977     variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
4978  dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4979  simtvars_save = id->dst_simt_vars;
4980  if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4981      && (simduid = bb->loop_father->simduid) != NULL_TREE
4982      && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4983      && single_imm_use (simduid, &use, &simtenter_stmt)
4984      && is_gimple_call (simtenter_stmt)
4985      && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4986    vec_alloc (id->dst_simt_vars, 0);
4987  else
4988    id->dst_simt_vars = NULL;
4989
4990  if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4991    profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4992
4993  /* If the src function contains an IFN_VA_ARG, then so will the dst
4994     function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
4995  prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4996  src_properties = id->src_cfun->curr_properties & prop_mask;
4997  if (src_properties != prop_mask)
4998    dst_cfun->curr_properties &= src_properties | ~prop_mask;
4999  dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
5000  id->dst_node->calls_declare_variant_alt
5001    |= id->src_node->calls_declare_variant_alt;
5002
5003  gcc_assert (!id->src_cfun->after_inlining);
5004
5005  id->entry_bb = bb;
5006  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
5007    {
5008      gimple_stmt_iterator si = gsi_last_bb (bb);
5009      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
						   NOT_TAKEN),
5011			GSI_NEW_STMT);
5012    }
5013  initialize_inlined_parameters (id, stmt, fn, bb);
5014  if (debug_nonbind_markers_p && debug_inline_points && id->block
5015      && inlined_function_outer_scope_p (id->block))
5016    {
5017      gimple_stmt_iterator si = gsi_last_bb (bb);
5018      gsi_insert_after (&si, gimple_build_debug_inline_entry
5019			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
5020			GSI_NEW_STMT);
5021    }
5022
5023  if (DECL_INITIAL (fn))
5024    {
5025      if (gimple_block (stmt))
5026	{
5027	  tree *var;
5028
5029	  prepend_lexical_block (id->block,
5030				 remap_blocks (DECL_INITIAL (fn), id));
5031	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
5032			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
5033				   == NULL_TREE));
	  /* Move vars for PARM_DECLs from the DECL_INITIAL block to
	     id->block; otherwise the DWARF DW_TAG_formal_parameter DIEs
	     will not be children of DW_TAG_inlined_subroutine, but of a
	     DW_TAG_lexical_block under it.  The parameters can then be
	     evaluated in the debugger, but don't show up in backtraces.  */
5039	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5040	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5041	      {
5042		tree v = *var;
5043		*var = TREE_CHAIN (v);
5044		TREE_CHAIN (v) = BLOCK_VARS (id->block);
5045		BLOCK_VARS (id->block) = v;
5046	      }
5047	    else
5048	      var = &TREE_CHAIN (*var);
5049	}
5050      else
5051	remap_blocks_to_null (DECL_INITIAL (fn), id);
5052    }
5053
5054  /* Return statements in the function body will be replaced by jumps
5055     to the RET_LABEL.  */
5056  gcc_assert (DECL_INITIAL (fn));
5057  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5058
5059  /* Find the LHS to which the result of this call is assigned.  */
5060  return_slot = NULL;
5061  if (gimple_call_lhs (stmt))
5062    {
5063      modify_dest = gimple_call_lhs (stmt);
5064
      /* The function we are inlining might not return a value, in
	 which case we should issue a warning that the function does
	 not return a value.  In that case the optimizers will see
	 that the variable to which the value is assigned was not
	 initialized, and we do not want to issue a warning about
	 that uninitialized variable.  */
5071      if (DECL_P (modify_dest))
5072	suppress_warning (modify_dest, OPT_Wuninitialized);
5073
5074      if (gimple_call_return_slot_opt_p (call_stmt))
5075	{
5076	  return_slot = modify_dest;
5077	  modify_dest = NULL;
5078	}
5079    }
5080  else
5081    modify_dest = NULL;
5082
5083  /* If we are inlining a call to the C++ operator new, we don't want
5084     to use type based alias analysis on the return value.  Otherwise
5085     we may get confused if the compiler sees that the inlined new
5086     function returns a pointer which was just deleted.  See bug
5087     33407.  */
5088  if (DECL_IS_OPERATOR_NEW_P (fn))
5089    {
5090      return_slot = NULL;
5091      modify_dest = NULL;
5092    }
5093
5094  /* Declare the return variable for the function.  */
5095  use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5096
5097  /* Add local vars in this inlined callee to caller.  */
5098  add_local_variables (id->src_cfun, cfun, id);
5099
5100  if (dump_enabled_p ())
5101    {
5102      char buf[128];
5103      snprintf (buf, sizeof(buf), "%4.2f",
5104		cg_edge->sreal_frequency ().to_double ());
5105      dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5106		       call_stmt,
5107		       "Inlining %C to %C with frequency %s\n",
5108		       id->src_node, id->dst_node, buf);
5109      if (dump_file && (dump_flags & TDF_DETAILS))
5110	{
5111	  id->src_node->dump (dump_file);
5112	  id->dst_node->dump (dump_file);
5113	}
5114    }
5115
5116  /* This is it.  Duplicate the callee body.  Assume callee is
5117     pre-gimplified.  Note that we must not alter the caller
5118     function in any way before this point, as this CALL_EXPR may be
5119     a self-referential call; if we're calling ourselves, we need to
5120     duplicate our body before altering anything.  */
5121  copy_body (id, bb, return_block, NULL);
5122
5123  reset_debug_bindings (id, stmt_gsi);
5124
5125  if (flag_stack_reuse != SR_NONE)
5126    for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5127      if (!TREE_THIS_VOLATILE (p))
5128	{
5129	  /* The value associated with P is a local temporary only if
5130	     there is no value associated with P in the debug map.  */
5131	  tree *varp = id->decl_map->get (p);
5132	  if (varp
5133	      && VAR_P (*varp)
5134	      && !is_gimple_reg (*varp)
5135	      && !(id->debug_map && id->debug_map->get (p)))
5136	    {
5137	      tree clobber = build_clobber (TREE_TYPE (*varp), CLOBBER_EOL);
5138	      gimple *clobber_stmt;
5139	      clobber_stmt = gimple_build_assign (*varp, clobber);
5140	      gimple_set_location (clobber_stmt, gimple_location (stmt));
5141	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5142	    }
5143	}
5144
5145  /* Reset the escaped solution.  */
5146  if (cfun->gimple_df)
5147    pt_solution_reset (&cfun->gimple_df->escaped);
5148
5149  /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
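  /* E.g. (a sketch with hypothetical names): a call
       simtrec_1 = IFN_GOMP_SIMT_ENTER (simduid_2);
     is rebuilt below as
       simtrec_1 = IFN_GOMP_SIMT_ENTER (simduid_2, &var1, &var2);
     where var1 and var2 are the privatized locals collected in
     id->dst_simt_vars during remapping.  */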
5150  if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5151    {
5152      size_t nargs = gimple_call_num_args (simtenter_stmt);
5153      vec<tree> *vars = id->dst_simt_vars;
5154      auto_vec<tree> newargs (nargs + vars->length ());
5155      for (size_t i = 0; i < nargs; i++)
5156	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5157      for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5158	{
5159	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5160	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5161	}
5162      gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5163      gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5164      gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5165      gsi_replace (&gsi, g, false);
5166    }
5167  vec_free (id->dst_simt_vars);
5168  id->dst_simt_vars = simtvars_save;
5169
5170  /* Clean up.  */
5171  if (id->debug_map)
5172    {
5173      delete id->debug_map;
5174      id->debug_map = dst;
5175    }
5176  delete id->decl_map;
5177  id->decl_map = st;
5178
  /* Unlink the call's virtual operands before replacing it.  */
5180  unlink_stmt_vdef (stmt);
5181  if (gimple_vdef (stmt)
5182      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5183    release_ssa_name (gimple_vdef (stmt));
5184
5185  /* If the inlined function returns a result that we care about,
5186     substitute the GIMPLE_CALL with an assignment of the return
5187     variable to the LHS of the call.  That is, if STMT was
5188     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
5189  if (use_retvar && gimple_call_lhs (stmt))
5190    {
5191      gimple *old_stmt = stmt;
5192      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5193      gimple_set_location (stmt, gimple_location (old_stmt));
5194      gsi_replace (&stmt_gsi, stmt, false);
5195      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5196      /* Append a clobber for id->retvar if easily possible.  */
5197      if (flag_stack_reuse != SR_NONE
5198	  && id->retvar
5199	  && VAR_P (id->retvar)
5200	  && id->retvar != return_slot
5201	  && id->retvar != modify_dest
5202	  && !TREE_THIS_VOLATILE (id->retvar)
5203	  && !is_gimple_reg (id->retvar)
5204	  && !stmt_ends_bb_p (stmt))
5205	{
5206	  tree clobber = build_clobber (TREE_TYPE (id->retvar), CLOBBER_EOL);
5207	  gimple *clobber_stmt;
5208	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5209	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5210	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5211	}
5212    }
5213  else
5214    {
5215      /* Handle the case of inlining a function with no return
5216	 statement, which causes the return value to become undefined.  */
5217      if (gimple_call_lhs (stmt)
5218	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5219	{
5220	  tree name = gimple_call_lhs (stmt);
5221	  tree var = SSA_NAME_VAR (name);
5222	  tree def = var ? ssa_default_def (cfun, var) : NULL;
5223
5224	  if (def)
5225	    {
5226	      /* If the variable is used undefined, make this name
5227		 undefined via a move.  */
5228	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5229	      gsi_replace (&stmt_gsi, stmt, true);
5230	    }
5231	  else
5232	    {
5233	      if (!var)
5234		{
5235		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5236		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5237		}
5238	      /* Otherwise make this variable undefined.  */
5239	      gsi_remove (&stmt_gsi, true);
5240	      set_ssa_default_def (cfun, var, name);
5241	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5242	    }
5243	}
5244      /* Replace with a clobber for id->retvar.  */
5245      else if (flag_stack_reuse != SR_NONE
5246	       && id->retvar
5247	       && VAR_P (id->retvar)
5248	       && id->retvar != return_slot
5249	       && id->retvar != modify_dest
5250	       && !TREE_THIS_VOLATILE (id->retvar)
5251	       && !is_gimple_reg (id->retvar))
5252	{
5253	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
5254	  gimple *clobber_stmt;
5255	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
5256	  gimple_set_location (clobber_stmt, gimple_location (stmt));
5257	  gsi_replace (&stmt_gsi, clobber_stmt, false);
5258	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5259	}
5260      else
5261	gsi_remove (&stmt_gsi, true);
5262    }
5263
5264  if (purge_dead_abnormal_edges)
5265    bitmap_set_bit (to_purge, return_block->index);
5266
5267  /* If the value of the new expression is ignored, that's OK.  We
5268     don't warn about this for CALL_EXPRs, so we shouldn't warn about
5269     the equivalent inlined version either.  */
5270  if (is_gimple_assign (stmt))
5271    {
5272      gcc_assert (gimple_assign_single_p (stmt)
5273		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5274      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5275    }
5276
5277  id->add_clobbers_to_eh_landing_pads = 0;
5278
5279  /* Output the inlining info for this abstract function, since it has been
5280     inlined.  If we don't do this now, we can lose the information about the
5281     variables in the function when the blocks get blown away as soon as we
5282     remove the cgraph node.  */
5283  if (gimple_block (stmt))
5284    (*debug_hooks->outlining_inline_function) (fn);
5285
5286  /* Update callgraph if needed.  */
5287  cg_edge->callee->remove ();
5288
5289  id->block = NULL_TREE;
5290  id->retvar = NULL_TREE;
5291  successfully_inlined = true;
5292
5293 egress:
5294  input_location = saved_location;
5295  return successfully_inlined;
5296}
5297
/* Expand call statements found in basic block BB.  Only toplevel
   GIMPLE_CALL statements (possibly with an assigned LHS) are candidates;
   internal function calls are skipped.  */
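
/* E.g. (a sketch): in a block containing

     foo (x);
     y_1 = bar (z_2);

   both statements are GIMPLE_CALLs and thus inlining candidates, while
   internal function calls (IFN_*) are skipped.  */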
5301
5302static bool
5303gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5304			    bitmap to_purge)
5305{
5306  gimple_stmt_iterator gsi;
5307  bool inlined = false;
5308
5309  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5310    {
5311      gimple *stmt = gsi_stmt (gsi);
5312      gsi_prev (&gsi);
5313
5314      if (is_gimple_call (stmt)
5315	  && !gimple_call_internal_p (stmt))
5316	inlined |= expand_call_inline (bb, stmt, id, to_purge);
5317    }
5318
5319  return inlined;
5320}
5321
5322
5323/* Walk all basic blocks created after FIRST and try to fold every statement
5324   in the STATEMENTS pointer set.  */
5325
5326static void
5327fold_marked_statements (int first, hash_set<gimple *> *statements)
5328{
5329  auto_bitmap to_purge;
5330
5331  auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5332  auto_sbitmap visited (last_basic_block_for_fn (cfun));
5333  bitmap_clear (visited);
5334
5335  stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5336  while (!stack.is_empty ())
5337    {
5338      /* Look at the edge on the top of the stack.  */
5339      edge e = stack.pop ();
5340      basic_block dest = e->dest;
5341
5342      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5343	  || bitmap_bit_p (visited, dest->index))
5344	continue;
5345
5346      bitmap_set_bit (visited, dest->index);
5347
5348      if (dest->index >= first)
5349	for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5350	     !gsi_end_p (gsi); gsi_next (&gsi))
5351	  {
5352	    if (!statements->contains (gsi_stmt (gsi)))
5353	      continue;
5354
5355	    gimple *old_stmt = gsi_stmt (gsi);
5356	    tree old_decl = (is_gimple_call (old_stmt)
5357			     ? gimple_call_fndecl (old_stmt) : 0);
5358	    if (old_decl && fndecl_built_in_p (old_decl))
5359	      {
		/* Folding builtins can create multiple statements;
		   we need to look at all of them.  */
5362		gimple_stmt_iterator i2 = gsi;
5363		gsi_prev (&i2);
5364		if (fold_stmt (&gsi))
5365		  {
5366		    gimple *new_stmt;
5367		    /* If a builtin at the end of a bb folded into nothing,
5368		       the following loop won't work.  */
5369		    if (gsi_end_p (gsi))
5370		      {
5371			cgraph_update_edges_for_call_stmt (old_stmt,
5372							   old_decl, NULL);
5373			break;
5374		      }
5375		    if (gsi_end_p (i2))
5376		      i2 = gsi_start_bb (dest);
5377		    else
5378		      gsi_next (&i2);
5379		    while (1)
5380		      {
5381			new_stmt = gsi_stmt (i2);
5382			update_stmt (new_stmt);
5383			cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5384							   new_stmt);
5385
5386			if (new_stmt == gsi_stmt (gsi))
5387			  {
			    /* It is okay to check only the very last of
			       these statements.  If it is a throwing
			       statement nothing will change.  If it isn't,
			       this can remove EH edges.  The only way this
			       could be wrong is if some intermediate stmts
			       threw but the last one doesn't; that would
			       mean we'd have to split the block, which we
			       can't do here and we'd lose anyway.  And as
			       builtins probably never throw, this is all
			       moot anyway.  */
5398			    if (maybe_clean_or_replace_eh_stmt (old_stmt,
5399								new_stmt))
5400			      bitmap_set_bit (to_purge, dest->index);
5401			    break;
5402			  }
5403			gsi_next (&i2);
5404		      }
5405		  }
5406	      }
5407	    else if (fold_stmt (&gsi))
5408	      {
5409		/* Re-read the statement from GSI as fold_stmt() may
5410		   have changed it.  */
5411		gimple *new_stmt = gsi_stmt (gsi);
5412		update_stmt (new_stmt);
5413
5414		if (is_gimple_call (old_stmt)
5415		    || is_gimple_call (new_stmt))
5416		  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5417						     new_stmt);
5418
5419		if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5420		  bitmap_set_bit (to_purge, dest->index);
5421	      }
5422	  }
5423
5424      if (EDGE_COUNT (dest->succs) > 0)
5425	{
5426	  /* Avoid warnings emitted from folding statements that
5427	     became unreachable because of inlined function parameter
5428	     propagation.  */
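	  /* E.g. (a sketch): if parameter propagation folded a condition
	     to "if (0)", find_taken_edge returns the single live edge and
	     the statements of the dead arm are never visited or folded.  */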
5429	  e = find_taken_edge (dest, NULL_TREE);
5430	  if (e)
5431	    stack.quick_push (e);
5432	  else
5433	    {
5434	      edge_iterator ei;
5435	      FOR_EACH_EDGE (e, ei, dest->succs)
5436		stack.safe_push (e);
5437	    }
5438	}
5439    }
5440
5441  gimple_purge_all_dead_eh_edges (to_purge);
5442}
5443
5444/* Expand calls to inline functions in the body of FN.  */
5445
5446unsigned int
5447optimize_inline_calls (tree fn)
5448{
5449  copy_body_data id;
5450  basic_block bb;
5451  int last = n_basic_blocks_for_fn (cfun);
5452  bool inlined_p = false;
5453
5454  /* Clear out ID.  */
5455  memset (&id, 0, sizeof (id));
5456
5457  id.src_node = id.dst_node = cgraph_node::get (fn);
5458  gcc_assert (id.dst_node->definition);
5459  id.dst_fn = fn;
5460  /* Or any functions that aren't finished yet.  */
5461  if (current_function_decl)
5462    id.dst_fn = current_function_decl;
5463
5464  id.copy_decl = copy_decl_maybe_to_var;
5465  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5466  id.transform_new_cfg = false;
5467  id.transform_return_to_modify = true;
5468  id.transform_parameter = true;
5469  id.statements_to_fold = new hash_set<gimple *>;
5470
5471  push_gimplify_context ();
5472
5473  /* We make no attempts to keep dominance info up-to-date.  */
5474  free_dominance_info (CDI_DOMINATORS);
5475  free_dominance_info (CDI_POST_DOMINATORS);
5476
5477  /* Register specific gimple functions.  */
5478  gimple_register_cfg_hooks ();
5479
5480  /* Reach the trees by walking over the CFG, and note the
5481     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split the block containing the call, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
5486  auto_bitmap to_purge;
5487  FOR_EACH_BB_FN (bb, cfun)
5488    inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5489
5490  pop_gimplify_context (NULL);
5491
5492  if (flag_checking)
5493    {
5494      struct cgraph_edge *e;
5495
5496      id.dst_node->verify ();
5497
5498      /* Double check that we inlined everything we are supposed to inline.  */
5499      for (e = id.dst_node->callees; e; e = e->next_callee)
5500	gcc_assert (e->inline_failed);
5501    }
5502
5503  /* If we didn't inline into the function there is nothing to do.  */
5504  if (!inlined_p)
5505    {
5506      delete id.statements_to_fold;
5507      return 0;
5508    }
5509
5510  /* Fold queued statements.  */
5511  update_max_bb_count ();
5512  fold_marked_statements (last, id.statements_to_fold);
5513  delete id.statements_to_fold;
5514
5515  /* Finally purge EH and abnormal edges from the call stmts we inlined.
5516     We need to do this after fold_marked_statements since that may walk
5517     the SSA use-def chain.  */
5518  unsigned i;
5519  bitmap_iterator bi;
5520  EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5521    {
5522      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5523      if (bb)
5524	{
5525	  gimple_purge_dead_eh_edges (bb);
5526	  gimple_purge_dead_abnormal_call_edges (bb);
5527	}
5528    }
5529
5530  gcc_assert (!id.debug_stmts.exists ());
5531
5532  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5533  number_blocks (fn);
5534
5535  delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5536  id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5537
5538  if (flag_checking)
5539    id.dst_node->verify ();
5540
  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions non-throwing
     and they don't care to proactively update local EH info.  This is done
     later in the fixup_cfg pass, which also executes the verification.  */
5545  return (TODO_update_ssa
5546	  | TODO_cleanup_cfg
5547	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5548	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5549	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5550	     ? TODO_rebuild_frequencies : 0));
5551}
5552
5553/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
5554
5555tree
5556copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5557{
5558  enum tree_code code = TREE_CODE (*tp);
5559  enum tree_code_class cl = TREE_CODE_CLASS (code);
5560
5561  /* We make copies of most nodes.  */
5562  if (IS_EXPR_CODE_CLASS (cl)
5563      || code == TREE_LIST
5564      || code == TREE_VEC
5565      || code == TYPE_DECL
5566      || code == OMP_CLAUSE)
5567    {
5568      /* Because the chain gets clobbered when we make a copy, we save it
5569	 here.  */
5570      tree chain = NULL_TREE, new_tree;
5571
5572      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5573	chain = TREE_CHAIN (*tp);
5574
5575      /* Copy the node.  */
5576      new_tree = copy_node (*tp);
5577
5578      *tp = new_tree;
5579
5580      /* Now, restore the chain, if appropriate.  That will cause
5581	 walk_tree to walk into the chain as well.  */
5582      if (code == PARM_DECL
5583	  || code == TREE_LIST
5584	  || code == OMP_CLAUSE)
5585	TREE_CHAIN (*tp) = chain;
5586
5587      /* For now, we don't update BLOCKs when we make copies.  So, we
5588	 have to nullify all BIND_EXPRs.  */
5589      if (TREE_CODE (*tp) == BIND_EXPR)
5590	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5591    }
5592  else if (code == CONSTRUCTOR)
5593    {
5594      /* CONSTRUCTOR nodes need special handling because
5595         we need to duplicate the vector of elements.  */
5596      tree new_tree;
5597
5598      new_tree = copy_node (*tp);
5599      CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5600      *tp = new_tree;
5601    }
5602  else if (code == STATEMENT_LIST)
5603    /* We used to just abort on STATEMENT_LIST, but we can run into them
5604       with statement-expressions (c++/40975).  */
5605    copy_statement_list (tp);
5606  else if (TREE_CODE_CLASS (code) == tcc_type)
5607    *walk_subtrees = 0;
5608  else if (TREE_CODE_CLASS (code) == tcc_declaration)
5609    *walk_subtrees = 0;
5610  else if (TREE_CODE_CLASS (code) == tcc_constant)
5611    *walk_subtrees = 0;
5612  return NULL_TREE;
5613}
5614
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST so that
   subsequent encounters map to the same copy.  */
5619
5620static void
5621remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5622{
5623  tree *n;
5624  tree t;
5625
5626  /* See if we already encountered this SAVE_EXPR.  */
5627  n = st->get (*tp);
5628
5629  /* If we didn't already remap this SAVE_EXPR, do so now.  */
5630  if (!n)
5631    {
5632      t = copy_node (*tp);
5633
5634      /* Remember this SAVE_EXPR.  */
5635      st->put (*tp, t);
5636      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
5637      st->put (t, t);
5638    }
5639  else
5640    {
5641      /* We've already walked into this SAVE_EXPR; don't do it again.  */
5642      *walk_subtrees = 0;
5643      t = *n;
5644    }
5645
5646  /* Replace this SAVE_EXPR with the copy.  */
5647  *tp = t;
5648}
5649
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copy the declaration and enter it in the decl map of the
   copy_body_data pointed to by WI->info.  */
5653
5654static tree
5655mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5656		        bool *handled_ops_p ATTRIBUTE_UNUSED,
5657		        struct walk_stmt_info *wi)
5658{
5659  copy_body_data *id = (copy_body_data *) wi->info;
5660  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5661
5662  if (stmt)
5663    {
5664      tree decl = gimple_label_label (stmt);
5665
5666      /* Copy the decl and remember the copy.  */
5667      insert_decl_map (id, decl, id->copy_decl (decl, id));
5668    }
5669
5670  return NULL_TREE;
5671}
5672
5673static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5674						  struct walk_stmt_info *wi);
5675
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map of the copy_body_data pointed to by WI->info,
   remaps all local declarations to appropriate replacements in gimple
   operands.  */
5680
5681static tree
5682replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5683{
5684  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5685  copy_body_data *id = (copy_body_data *) wi->info;
5686  hash_map<tree, tree> *st = id->decl_map;
5687  tree *n;
5688  tree expr = *tp;
5689
5690  /* For recursive invocations this is no longer the LHS itself.  */
5691  bool is_lhs = wi->is_lhs;
5692  wi->is_lhs = false;
5693
5694  if (TREE_CODE (expr) == SSA_NAME)
5695    {
5696      *tp = remap_ssa_name (*tp, id);
5697      *walk_subtrees = 0;
5698      if (is_lhs)
5699	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5700    }
5701  /* Only a local declaration (variable or label).  */
5702  else if ((VAR_P (expr) && !TREE_STATIC (expr))
5703	   || TREE_CODE (expr) == LABEL_DECL)
5704    {
5705      /* Lookup the declaration.  */
5706      n = st->get (expr);
5707
5708      /* If it's there, remap it.  */
5709      if (n)
5710	*tp = *n;
5711      *walk_subtrees = 0;
5712    }
5713  else if (TREE_CODE (expr) == STATEMENT_LIST
5714	   || TREE_CODE (expr) == BIND_EXPR
5715	   || TREE_CODE (expr) == SAVE_EXPR)
5716    gcc_unreachable ();
5717  else if (TREE_CODE (expr) == TARGET_EXPR)
5718    {
5719      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5720         It's OK for this to happen if it was part of a subtree that
5721         isn't immediately expanded, such as operand 2 of another
5722         TARGET_EXPR.  */
5723      if (!TREE_OPERAND (expr, 1))
5724	{
5725	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5726	  TREE_OPERAND (expr, 3) = NULL_TREE;
5727	}
5728    }
5729  else if (TREE_CODE (expr) == OMP_CLAUSE)
5730    {
5731      /* Before the omplower pass completes, some OMP clauses can contain
5732	 sequences that are neither copied by gimple_seq_copy nor walked by
5733	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
	 in those situations, we have to copy and process them explicitly.  */
5735
5736      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5737	{
5738	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5739	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5740	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5741	}
5742      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5743	{
5744	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5745	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5746	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5747	}
5748      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5749	{
5750	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5751	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5752	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5753	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5754	  seq = duplicate_remap_omp_clause_seq (seq, wi);
5755	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5756	}
5757    }
5758
5759  /* Keep iterating.  */
5760  return NULL_TREE;
5761}
5762
5763
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map of the copy_body_data pointed to by WI->info,
   remaps all local declarations to appropriate replacements in gimple
   statements.  */
5768
5769static tree
5770replace_locals_stmt (gimple_stmt_iterator *gsip,
5771		     bool *handled_ops_p ATTRIBUTE_UNUSED,
5772		     struct walk_stmt_info *wi)
5773{
5774  copy_body_data *id = (copy_body_data *) wi->info;
5775  gimple *gs = gsi_stmt (*gsip);
5776
5777  if (gbind *stmt = dyn_cast <gbind *> (gs))
5778    {
5779      tree block = gimple_bind_block (stmt);
5780
5781      if (block)
5782	{
5783	  remap_block (&block, id);
5784	  gimple_bind_set_block (stmt, block);
5785	}
5786
5787      /* This will remap a lot of the same decls again, but this should be
5788	 harmless.  */
5789      if (gimple_bind_vars (stmt))
5790	{
5791	  tree old_var, decls = gimple_bind_vars (stmt);
5792
5793	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5794	    if (!can_be_nonlocal (old_var, id)
5795		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5796	      remap_decl (old_var, id);
5797
5798	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
5799	  id->prevent_decl_creation_for_types = true;
5800	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5801	  id->prevent_decl_creation_for_types = false;
5802	}
5803    }
5804
5805  /* Keep iterating.  */
5806  return NULL_TREE;
5807}
5808
5809/* Create a copy of SEQ and remap all decls in it.  */
5810
5811static gimple_seq
5812duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5813{
5814  if (!seq)
5815    return NULL;
5816
  /* If there are any labels in OMP sequences, they can only be referred to
     within the sequence itself, so we can do both the label marking and the
     remapping here.  */
5819  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5820  gimple_seq copy = gimple_seq_copy (seq);
5821  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5822  return copy;
5823}
5824
5825/* Copies everything in SEQ and replaces variables and labels local to
5826   current_function_decl.  */
5827
5828gimple_seq
5829copy_gimple_seq_and_replace_locals (gimple_seq seq)
5830{
5831  copy_body_data id;
5832  struct walk_stmt_info wi;
5833  gimple_seq copy;
5834
  /* There's nothing to do for an empty sequence.  */
5836  if (seq == NULL)
5837    return seq;
5838
5839  /* Set up ID.  */
5840  memset (&id, 0, sizeof (id));
5841  id.src_fn = current_function_decl;
5842  id.dst_fn = current_function_decl;
5843  id.src_cfun = cfun;
5844  id.decl_map = new hash_map<tree, tree>;
5845  id.debug_map = NULL;
5846
5847  id.copy_decl = copy_decl_no_change;
5848  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5849  id.transform_new_cfg = false;
5850  id.transform_return_to_modify = false;
5851  id.transform_parameter = false;
5852
5853  /* Walk the tree once to find local labels.  */
5854  memset (&wi, 0, sizeof (wi));
5855  hash_set<tree> visited;
5856  wi.info = &id;
5857  wi.pset = &visited;
5858  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5859
5860  copy = gimple_seq_copy (seq);
5861
5862  /* Walk the copy, remapping decls.  */
5863  memset (&wi, 0, sizeof (wi));
5864  wi.info = &id;
5865  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5866
5867  /* Clean up.  */
5868  delete id.decl_map;
5869  if (id.debug_map)
5870    delete id.debug_map;
5871  if (id.dependence_map)
5872    {
5873      delete id.dependence_map;
5874      id.dependence_map = NULL;
5875    }
5876
5877  return copy;
5878}
5879
5880
5881/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
5882
5883static tree
5884debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5885{
5886  if (*tp == data)
5887    return (tree) data;
5888  else
5889    return NULL;
5890}
5891
5892DEBUG_FUNCTION bool
5893debug_find_tree (tree top, tree search)
5894{
5895  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5896}
5897
5898
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to the current function's local decls and, if BLOCK is non-null, to
   its BLOCK_VARS.  */
5901
5902static void
5903declare_inline_vars (tree block, tree vars)
5904{
5905  tree t;
5906  for (t = vars; t; t = DECL_CHAIN (t))
5907    {
5908      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5909      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5910      add_local_decl (cfun, t);
5911    }
5912
5913  if (block)
5914    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5915}
5916
/* Finish up the copy COPY of DECL.  The original DECL was in ID->src_fn,
   but the copy will be in ID->dst_fn.  Fix up debug info, DECL_CONTEXT and
   related bits accordingly.  */
5920
5921tree
5922copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5923{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
5926  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5927  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5928
5929  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5930     declaration inspired this copy.  */
5931  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5932
5933  /* The new variable/label has no RTL, yet.  */
5934  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5935      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5936    SET_DECL_RTL (copy, 0);
5937  /* For vector typed decls make sure to update DECL_MODE according
5938     to the new function context.  */
5939  if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5940    SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5941
5942  /* These args would always appear unused, if not for this.  */
5943  TREE_USED (copy) = 1;
5944
5945  /* Set the context for the new declaration.  */
5946  if (!DECL_CONTEXT (decl))
5947    /* Globals stay global.  */
5948    ;
5949  else if (DECL_CONTEXT (decl) != id->src_fn)
5950    /* Things that weren't in the scope of the function we're inlining
5951       from aren't in the scope we're inlining to, either.  */
5952    ;
5953  else if (TREE_STATIC (decl))
5954    /* Function-scoped static variables should stay in the original
5955       function.  */
5956    ;
5957  else
5958    {
5959      /* Ordinary automatic local variables are now in the scope of the
5960	 new function.  */
5961      DECL_CONTEXT (copy) = id->dst_fn;
5962      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5963	{
5964	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5965	    DECL_ATTRIBUTES (copy)
5966	      = tree_cons (get_identifier ("omp simt private"), NULL,
5967			   DECL_ATTRIBUTES (copy));
5968	  id->dst_simt_vars->safe_push (copy);
5969	}
5970    }
5971
5972  return copy;
5973}
5974
/* Create a new VAR_DECL that is identical in all respects to DECL except
   for its tree code; DECL can be either a PARM_DECL or a RESULT_DECL.  The
   original DECL must come from ID->src_fn and the copy will be part of
   ID->dst_fn.  */
5978
5979tree
5980copy_decl_to_var (tree decl, copy_body_data *id)
5981{
5982  tree copy, type;
5983
5984  gcc_assert (TREE_CODE (decl) == PARM_DECL
5985	      || TREE_CODE (decl) == RESULT_DECL);
5986
5987  type = TREE_TYPE (decl);
5988
5989  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5990		     VAR_DECL, DECL_NAME (decl), type);
5991  if (DECL_PT_UID_SET_P (decl))
5992    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5993  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5994  TREE_READONLY (copy) = TREE_READONLY (decl);
5995  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5996  DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5997  DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5998
5999  return copy_decl_for_dup_finish (id, decl, copy);
6000}
6001
6002/* Like copy_decl_to_var, but create a return slot object instead of a
6003   pointer variable for return by invisible reference.  */
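
/* E.g. (a sketch): for a callee that returns an aggregate by invisible
   reference, DECL_RESULT has pointer type "struct S *"; the VAR_DECL
   created here has the pointed-to type "struct S" and acts as the
   return slot itself.  */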
6004
6005static tree
6006copy_result_decl_to_var (tree decl, copy_body_data *id)
6007{
6008  tree copy, type;
6009
6010  gcc_assert (TREE_CODE (decl) == PARM_DECL
6011	      || TREE_CODE (decl) == RESULT_DECL);
6012
6013  type = TREE_TYPE (decl);
6014  if (DECL_BY_REFERENCE (decl))
6015    type = TREE_TYPE (type);
6016
6017  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6018		     VAR_DECL, DECL_NAME (decl), type);
6019  if (DECL_PT_UID_SET_P (decl))
6020    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6021  TREE_READONLY (copy) = TREE_READONLY (decl);
6022  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6023  if (!DECL_BY_REFERENCE (decl))
6024    {
6025      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6026      DECL_NOT_GIMPLE_REG_P (copy)
6027	= (DECL_NOT_GIMPLE_REG_P (decl)
	   /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
	      mirror that to the created VAR_DECL.  */
6030	   || (TREE_CODE (decl) == RESULT_DECL
6031	       && aggregate_value_p (decl, id->src_fn)));
6032    }
6033
6034  return copy_decl_for_dup_finish (id, decl, copy);
6035}
6036
6037tree
6038copy_decl_no_change (tree decl, copy_body_data *id)
6039{
6040  tree copy;
6041
6042  copy = copy_node (decl);
6043
6044  /* The COPY is not abstract; it will be generated in DST_FN.  */
6045  DECL_ABSTRACT_P (copy) = false;
6046  lang_hooks.dup_lang_specific_decl (copy);
6047
6048  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6049     been taken; it's for internal bookkeeping in expand_goto_internal.  */
6050  if (TREE_CODE (copy) == LABEL_DECL)
6051    {
6052      TREE_ADDRESSABLE (copy) = 0;
6053      LABEL_DECL_UID (copy) = -1;
6054    }
6055
6056  return copy_decl_for_dup_finish (id, decl, copy);
6057}
6058
6059static tree
6060copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6061{
6062  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6063    return copy_decl_to_var (decl, id);
6064  else
6065    return copy_decl_no_change (decl, id);
6066}
6067
6068/* Return a copy of the function's argument tree without any modifications.  */
6069
6070static tree
6071copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6072{
6073  tree arg, *parg;
6074  tree new_parm = NULL;
6075
6076  parg = &new_parm;
6077  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6078    {
6079      tree new_tree = remap_decl (arg, id);
6080      if (TREE_CODE (new_tree) != PARM_DECL)
6081	new_tree = id->copy_decl (arg, id);
6082      lang_hooks.dup_lang_specific_decl (new_tree);
6083      *parg = new_tree;
6084      parg = &DECL_CHAIN (new_tree);
6085    }
6086  return new_parm;
6087}
6088
6089/* Return a copy of the function's static chain.  */
6090static tree
6091copy_static_chain (tree static_chain, copy_body_data * id)
6092{
6093  tree *chain_copy, *pvar;
6094
6095  chain_copy = &static_chain;
6096  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6097    {
6098      tree new_tree = remap_decl (*pvar, id);
6099      lang_hooks.dup_lang_specific_decl (new_tree);
6100      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6101      *pvar = new_tree;
6102    }
6103  return static_chain;
6104}
6105
6106/* Return true if the function is allowed to be versioned.
6107   This is a guard for the versioning functionality.  */
6108
6109bool
6110tree_versionable_function_p (tree fndecl)
6111{
6112  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6113	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6114}
6115
6116/* Update clone info after duplication.  */
6117
6118static void
6119update_clone_info (copy_body_data * id)
6120{
6121  struct cgraph_node *this_node = id->dst_node;
6122  if (!this_node->clones)
6123    return;
6124  for (cgraph_node *node = this_node->clones; node != this_node;)
6125    {
6126      /* First update replace maps to match the new body.  */
6127      clone_info *info = clone_info::get (node);
6128      if (info && info->tree_map)
6129	{
6130	  unsigned int i;
6131	  for (i = 0; i < vec_safe_length (info->tree_map); i++)
6132	    {
6133	      struct ipa_replace_map *replace_info;
6134	      replace_info = (*info->tree_map)[i];
6135	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6136	    }
6137	}
6138
6139      if (node->clones)
6140	node = node->clones;
6141      else if (node->next_sibling_clone)
6142	node = node->next_sibling_clone;
6143      else
6144	{
6145	  while (node != id->dst_node && !node->next_sibling_clone)
6146	    node = node->clone_of;
6147	  if (node != id->dst_node)
6148	    node = node->next_sibling_clone;
6149	}
6150    }
6151}
6152
6153/* Create a copy of a function's tree.
6154   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6155   of the original function and the new copied function
6156   respectively.  In case we want to replace a DECL
6157   tree with another tree while duplicating the function's
6158   body, TREE_MAP represents the mapping between these
6159   trees. If UPDATE_CLONES is set, the call_stmt fields
6160   of edges of clones of the function will be updated.
6161
   If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
   the function parameters and return value) should be modified.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
6167void
6168tree_function_versioning (tree old_decl, tree new_decl,
6169			  vec<ipa_replace_map *, va_gc> *tree_map,
6170			  ipa_param_adjustments *param_adjustments,
6171			  bool update_clones, bitmap blocks_to_copy,
6172			  basic_block new_entry)
6173{
6174  struct cgraph_node *old_version_node;
6175  struct cgraph_node *new_version_node;
6176  copy_body_data id;
6177  tree p;
6178  unsigned i;
6179  struct ipa_replace_map *replace_info;
6180  basic_block old_entry_block, bb;
6181  auto_vec<gimple *, 10> init_stmts;
6182  tree vars = NULL_TREE;
6183
6184  /* We can get called recursively from expand_call_inline via clone
6185     materialization.  While expand_call_inline maintains input_location
     we cannot allow it to leak into the materialized clone.  */
6187  location_t saved_location = input_location;
6188  input_location = UNKNOWN_LOCATION;
6189
6190  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6191	      && TREE_CODE (new_decl) == FUNCTION_DECL);
6192  DECL_POSSIBLY_INLINED (old_decl) = 1;
6193
6194  old_version_node = cgraph_node::get (old_decl);
6195  gcc_checking_assert (old_version_node);
6196  new_version_node = cgraph_node::get (new_decl);
6197  gcc_checking_assert (new_version_node);
6198
6199  /* Copy over debug args.  */
6200  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6201    {
6202      vec<tree, va_gc> **new_debug_args, **old_debug_args;
6203      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6204      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6205      old_debug_args = decl_debug_args_lookup (old_decl);
6206      if (old_debug_args)
6207	{
6208	  new_debug_args = decl_debug_args_insert (new_decl);
6209	  *new_debug_args = vec_safe_copy (*old_debug_args);
6210	}
6211    }
6212
6213  /* Output the inlining info for this abstract function, since it has been
6214     inlined.  If we don't do this now, we can lose the information about the
6215     variables in the function when the blocks get blown away as soon as we
6216     remove the cgraph node.  */
6217  (*debug_hooks->outlining_inline_function) (old_decl);
6218
6219  DECL_ARTIFICIAL (new_decl) = 1;
6220  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6221  if (DECL_ORIGIN (old_decl) == old_decl)
6222    old_version_node->used_as_abstract_origin = true;
6223  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6224
6225  /* Prepare the data structures for the tree copy.  */
6226  memset (&id, 0, sizeof (id));
6227
6228  /* Generate a new name for the new version. */
6229  id.statements_to_fold = new hash_set<gimple *>;
6230
6231  id.decl_map = new hash_map<tree, tree>;
6232  id.debug_map = NULL;
6233  id.src_fn = old_decl;
6234  id.dst_fn = new_decl;
6235  id.src_node = old_version_node;
6236  id.dst_node = new_version_node;
6237  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6238  id.blocks_to_copy = blocks_to_copy;
6239
6240  id.copy_decl = copy_decl_no_change;
6241  id.transform_call_graph_edges
6242    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6243  id.transform_new_cfg = true;
6244  id.transform_return_to_modify = false;
6245  id.transform_parameter = false;
6246
6247  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
6248  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6249  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6250  initialize_cfun (new_decl, old_decl,
6251		   new_entry ? new_entry->count : old_entry_block->count);
6252  new_version_node->calls_declare_variant_alt
6253    = old_version_node->calls_declare_variant_alt;
6254  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6255    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6256      = id.src_cfun->gimple_df->ipa_pta;
6257
6258  /* Copy the function's static chain.  */
6259  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6260  if (p)
6261    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6262      = copy_static_chain (p, &id);
6263
6264  auto_vec<int, 16> new_param_indices;
6265  clone_info *info = clone_info::get (old_version_node);
6266  ipa_param_adjustments *old_param_adjustments
6267    = info ? info->param_adjustments : NULL;
6268  if (old_param_adjustments)
6269    old_param_adjustments->get_updated_indices (&new_param_indices);
6270
6271  /* If there's a tree_map, prepare for substitution.  */
6272  if (tree_map)
6273    for (i = 0; i < tree_map->length (); i++)
6274      {
6275	gimple *init;
6276	replace_info = (*tree_map)[i];
6277
6278	int p = replace_info->parm_num;
6279	if (old_param_adjustments)
6280	  p = new_param_indices[p];
6281
6282	tree parm;
6283	for (parm = DECL_ARGUMENTS (old_decl); p;
6284	     parm = DECL_CHAIN (parm))
6285	  p--;
6286	gcc_assert (parm);
6287	init = setup_one_parameter (&id, parm, replace_info->new_tree,
6288				    id.src_fn, NULL, &vars);
6289	if (init)
6290	  init_stmts.safe_push (init);
6291      }
6292
6293  ipa_param_body_adjustments *param_body_adjs = NULL;
6294  if (param_adjustments)
6295    {
6296      param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6297							new_decl, old_decl,
6298							&id, &vars, tree_map);
6299      id.param_body_adjs = param_body_adjs;
6300      DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6301    }
6302  else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6303    DECL_ARGUMENTS (new_decl)
6304      = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6305
6306  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6307  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6308
6309  declare_inline_vars (DECL_INITIAL (new_decl), vars);
6310
6311  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6312    /* Add local vars.  */
6313    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6314
6315  if (DECL_RESULT (old_decl) == NULL_TREE)
6316    ;
6317  else if (param_adjustments && param_adjustments->m_skip_return
6318	   && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6319    {
6320      tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6321						   &id);
6322      declare_inline_vars (NULL, resdecl_repl);
6323      if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6324	resdecl_repl = build_fold_addr_expr (resdecl_repl);
6325      insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6326
6327      DECL_RESULT (new_decl)
6328	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6329		      RESULT_DECL, NULL_TREE, void_type_node);
6330      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6331      DECL_IS_MALLOC (new_decl) = false;
6332      cfun->returns_struct = 0;
6333      cfun->returns_pcc_struct = 0;
6334    }
6335  else
6336    {
6337      tree old_name;
6338      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6339      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6340      if (gimple_in_ssa_p (id.src_cfun)
6341	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6342	  && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6343	{
6344	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6345	  insert_decl_map (&id, old_name, new_name);
6346	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6347	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6348	}
6349    }
6350
  /* Set up the destination function's loop tree.  */
6352  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6353    {
6354      cfun->curr_properties &= ~PROP_loops;
6355      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6356      cfun->curr_properties |= PROP_loops;
6357    }
6358
6359  /* Copy the Function's body.  */
6360  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6361	     new_entry);
6362
6363  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
6364  number_blocks (new_decl);
6365
6366  /* We want to create the BB unconditionally, so that the addition of
6367     debug stmts doesn't affect BB count, which may in the end cause
6368     codegen differences.  */
6369  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6370  while (init_stmts.length ())
6371    insert_init_stmt (&id, bb, init_stmts.pop ());
6372  update_clone_info (&id);
6373
6374  /* Remap the nonlocal_goto_save_area, if any.  */
6375  if (cfun->nonlocal_goto_save_area)
6376    {
6377      struct walk_stmt_info wi;
6378
6379      memset (&wi, 0, sizeof (wi));
6380      wi.info = &id;
6381      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6382    }
6383
6384  /* Clean up.  */
6385  delete id.decl_map;
6386  if (id.debug_map)
6387    delete id.debug_map;
6388  free_dominance_info (CDI_DOMINATORS);
6389  free_dominance_info (CDI_POST_DOMINATORS);
6390
6391  update_max_bb_count ();
6392  fold_marked_statements (0, id.statements_to_fold);
6393  delete id.statements_to_fold;
6394  delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6395  if (id.dst_node->definition)
6396    cgraph_edge::rebuild_references ();
6397  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6398    {
6399      calculate_dominance_info (CDI_DOMINATORS);
6400      fix_loop_structure (NULL);
6401    }
6402  update_ssa (TODO_update_ssa);
6403
6404  /* After partial cloning we need to rescale frequencies, so they are
6405     within proper range in the cloned function.  */
6406  if (new_entry)
6407    {
6408      struct cgraph_edge *e;
6409      rebuild_frequencies ();
6410
6411      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6412      for (e = new_version_node->callees; e; e = e->next_callee)
6413	{
6414	  basic_block bb = gimple_bb (e->call_stmt);
6415	  e->count = bb->count;
6416	}
6417      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6418	{
6419	  basic_block bb = gimple_bb (e->call_stmt);
6420	  e->count = bb->count;
6421	}
6422    }
6423
6424  if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6425    {
6426      vec<tree, va_gc> **debug_args = NULL;
6427      unsigned int len = 0;
6428      unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6429
6430      for (i = 0; i < reset_len; i++)
6431	{
6432	  tree parm = param_body_adjs->m_reset_debug_decls[i];
6433	  gcc_assert (is_gimple_reg (parm));
6434	  tree ddecl;
6435
6436	  if (debug_args == NULL)
6437	    {
6438	      debug_args = decl_debug_args_insert (new_decl);
6439	      len = vec_safe_length (*debug_args);
6440	    }
6441	  ddecl = build_debug_expr_decl (TREE_TYPE (parm));
6442	  /* FIXME: Is setting the mode really necessary? */
6443	  SET_DECL_MODE (ddecl, DECL_MODE (parm));
6444	  vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6445	  vec_safe_push (*debug_args, ddecl);
6446	}
6447      if (debug_args != NULL)
6448	{
	  /* On the callee side, add
	     DEBUG D#Y s=> parm
	     DEBUG var => D#Y
	     stmts to the first bb, where var is a VAR_DECL created for the
	     optimized-away parameter in the DECL_INITIAL block.  This hints
	     in the debug info that var (whose DECL_ORIGIN is the parm
	     PARM_DECL) is optimized away, but can be looked up at the call
	     site as the value of D#X there.  */
	  gimple_stmt_iterator cgsi
	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gimple *def_temp;
	  tree var = vars;
	  i = vec_safe_length (*debug_args);
	  do
	    {
	      tree vexpr = NULL_TREE;
	      i -= 2;
	      while (var != NULL_TREE
		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
		var = TREE_CHAIN (var);
	      if (var == NULL_TREE)
		break;
	      tree parm = (**debug_args)[i];
	      if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
		if (tree *d
		    = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
		  vexpr = *d;
	      if (!vexpr)
		{
		  vexpr = build_debug_expr_decl (TREE_TYPE (parm));
		  /* FIXME: Is setting the mode really necessary? */
		  SET_DECL_MODE (vexpr, DECL_MODE (parm));
		}
	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	    }
	  while (i > len);
	}
    }
  delete param_body_adjs;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  input_location = saved_location;
  return;
}
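
/* Example (not part of the original sources) of the debug stmts created by
   the m_reset_debug_decls handling above: for an optimized-away parameter
   "int n" of the source function, the first basic block of the new body
   receives a pair of the form

     # DEBUG D#1 s=> n
     # DEBUG n => D#1

   while DECL_ORIGIN (n) and D#1 are pushed onto the DECL_DEBUG_ARGS vector
   of new_decl, so a debugger can still recover n's value from the call
   site.  */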

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
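
/* Illustrative sketch (not part of the original sources): a front end
   folding a GENERIC tree could try the above and keep the original
   CALL_EXPR whenever no inlined body is produced.  The helper
   fold_const_call_sketch below is hypothetical.

     static tree
     fold_const_call_sketch (tree expr)
     {
       if (TREE_CODE (expr) == CALL_EXPR)
	 {
	   tree body = maybe_inline_call_in_expr (expr);
	   if (body != NULL_TREE)
	     return body;
	 }
       return expr;
     }  */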

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}
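
/* Usage sketch (not part of the original sources): the returned copy is
   its own canonical type, so it can be adjusted without affecting the
   original.  A hypothetical caller might do:

     tree copy = build_duplicate_type (orig_type);
     gcc_checking_assert (TYPE_CANONICAL (copy) == copy);  */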

/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;
  id.do_not_fold = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}

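/* Illustrative sketch (not part of the original sources): a constexpr-style
   evaluator could use copy_fn to obtain a private, unshared copy of FN's
   body plus the remapped parameter chain and result decl, bind argument
   values to the parameters, and then evaluate the body.  The helpers
   bind_parameter_value and evaluate_body below are hypothetical.

     static tree
     eval_call_sketch (tree fn, vec<tree> args)
     {
       tree parms, result;
       tree body = copy_fn (fn, parms, result);
       unsigned ix = 0;
       for (tree p = parms; p; p = DECL_CHAIN (p))
	 bind_parameter_value (p, args[ix++]);
       return evaluate_body (body, result);
     }  */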