1/* Tree inlining.
2   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3   Free Software Foundation, Inc.
4   Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 3, or (at your option)
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "toplev.h"
27#include "tree.h"
28#include "tree-inline.h"
29#include "rtl.h"
30#include "expr.h"
31#include "flags.h"
32#include "params.h"
33#include "input.h"
34#include "insn-config.h"
35#include "hashtab.h"
36#include "langhooks.h"
37#include "basic-block.h"
38#include "tree-iterator.h"
39#include "cgraph.h"
40#include "intl.h"
41#include "tree-mudflap.h"
42#include "tree-flow.h"
43#include "function.h"
44#include "ggc.h"
45#include "tree-flow.h"
46#include "diagnostic.h"
47#include "except.h"
48#include "debug.h"
49#include "pointer-set.h"
50#include "ipa-prop.h"
51#include "value-prof.h"
52#include "tree-pass.h"
53#include "target.h"
54#include "integrate.h"
55
56/* I'm not real happy about this, but we need to handle gimple and
57   non-gimple trees.  */
58#include "gimple.h"
59
60/* Inlining, Cloning, Versioning, Parallelization
61
62   Inlining: a function body is duplicated, but the PARM_DECLs are
63   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64   MODIFY_EXPRs that store to a dedicated returned-value variable.
65   The duplicated eh_region info of the copy will later be appended
66   to the info for the caller; the eh_region info in copied throwing
67   statements and RESX statements are adjusted accordingly.
68
69   Cloning: (only in C++) We have one body for a con/de/structor, and
70   multiple function decls, each with a unique parameter list.
71   Duplicate the body, using the given splay tree; some parameters
72   will become constants (like 0 or 1).
73
74   Versioning: a function body is duplicated and the result is a new
75   function rather than into blocks of an existing function as with
76   inlining.  Some parameters will become constants.
77
78   Parallelization: a region of a function is duplicated resulting in
79   a new function.  Variables may be replaced with complex expressions
80   to enable shared variable semantics.
81
82   All of these will simultaneously lookup any callgraph edges.  If
83   we're going to inline the duplicated function body, and the given
84   function has some cloned callgraph nodes (one for each place this
85   function will be inlined) those callgraph edges will be duplicated.
86   If we're cloning the body, those callgraph edges will be
87   updated to point into the new body.  (Note that the original
88   callgraph node and edge list will not be altered.)
89
90   See the CALL_EXPR handling case in copy_tree_body_r ().  */
91
92/* To Do:
93
94   o In order to make inlining-on-trees work, we pessimized
95     function-local static constants.  In particular, they are now
96     always output, even when not addressed.  Fix this by treating
97     function-local static constants just like global static
98     constants; the back-end already knows not to output them if they
99     are not needed.
100
101   o Provide heuristics to clamp inlining of recursive template
102     calls?  */
103
104
105/* Weights that estimate_num_insns uses for heuristics in inlining.  */
106
107eni_weights eni_inlining_weights;
108
109/* Weights that estimate_num_insns uses to estimate the size of the
110   produced code.  */
111
112eni_weights eni_size_weights;
113
114/* Weights that estimate_num_insns uses to estimate the time necessary
115   to execute the produced code.  */
116
117eni_weights eni_time_weights;
118
119/* Prototypes.  */
120
121static tree declare_return_variable (copy_body_data *, tree, tree);
122static void remap_block (tree *, copy_body_data *);
123static void copy_bind_expr (tree *, int *, copy_body_data *);
124static tree mark_local_for_remap_r (tree *, int *, void *);
125static void unsave_expr_1 (tree);
126static tree unsave_r (tree *, int *, void *);
127static void declare_inline_vars (tree, tree);
128static void remap_save_expr (tree *, void *, int *);
129static void prepend_lexical_block (tree current_block, tree new_block);
130static tree copy_decl_to_var (tree, copy_body_data *);
131static tree copy_result_decl_to_var (tree, copy_body_data *);
132static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133static gimple remap_gimple_stmt (gimple, copy_body_data *);
134static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
135
136/* Insert a tree->tree mapping for ID.  Despite the name suggests
137   that the trees should be variables, it is used for more than that.  */
138
139void
140insert_decl_map (copy_body_data *id, tree key, tree value)
141{
142  *pointer_map_insert (id->decl_map, key) = value;
143
144  /* Always insert an identity map as well.  If we see this same new
145     node again, we won't want to duplicate it a second time.  */
146  if (key != value)
147    *pointer_map_insert (id->decl_map, value) = value;
148}
149
150/* Insert a tree->tree mapping for ID.  This is only used for
151   variables.  */
152
153static void
154insert_debug_decl_map (copy_body_data *id, tree key, tree value)
155{
156  if (!gimple_in_ssa_p (id->src_cfun))
157    return;
158
159  if (!MAY_HAVE_DEBUG_STMTS)
160    return;
161
162  if (!target_for_debug_bind (key))
163    return;
164
165  gcc_assert (TREE_CODE (key) == PARM_DECL);
166  gcc_assert (TREE_CODE (value) == VAR_DECL);
167
168  if (!id->debug_map)
169    id->debug_map = pointer_map_create ();
170
171  *pointer_map_insert (id->debug_map, key) = value;
172}
173
174/* If nonzero, we're remapping the contents of inlined debug
175   statements.  If negative, an error has occurred, such as a
176   reference to a variable that isn't available in the inlined
177   context.  */
178static int processing_debug_stmt = 0;
179
180/* Construct new SSA name for old NAME. ID is the inline context.  */
181
182static tree
183remap_ssa_name (tree name, copy_body_data *id)
184{
185  tree new_tree;
186  tree *n;
187
188  gcc_assert (TREE_CODE (name) == SSA_NAME);
189
190  n = (tree *) pointer_map_contains (id->decl_map, name);
191  if (n)
192    return unshare_expr (*n);
193
194  if (processing_debug_stmt)
195    {
196      processing_debug_stmt = -1;
197      return name;
198    }
199
200  /* Do not set DEF_STMT yet as statement is not copied yet. We do that
201     in copy_bb.  */
202  new_tree = remap_decl (SSA_NAME_VAR (name), id);
203
204  /* We might've substituted constant or another SSA_NAME for
205     the variable.
206
207     Replace the SSA name representing RESULT_DECL by variable during
208     inlining:  this saves us from need to introduce PHI node in a case
209     return value is just partly initialized.  */
210  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
211      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
212	  || !id->transform_return_to_modify))
213    {
214      new_tree = make_ssa_name (new_tree, NULL);
215      insert_decl_map (id, name, new_tree);
216      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
217	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
218      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
219      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
220	{
221	  /* By inlining function having uninitialized variable, we might
222	     extend the lifetime (variable might get reused).  This cause
223	     ICE in the case we end up extending lifetime of SSA name across
224	     abnormal edge, but also increase register pressure.
225
226	     We simply initialize all uninitialized vars by 0 except
227	     for case we are inlining to very first BB.  We can avoid
228	     this for all BBs that are not inside strongly connected
229	     regions of the CFG, but this is expensive to test.  */
230	  if (id->entry_bb
231	      && is_gimple_reg (SSA_NAME_VAR (name))
232	      && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
233	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
234		  || EDGE_COUNT (id->entry_bb->preds) != 1))
235	    {
236	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
237	      gimple init_stmt;
238
239	      init_stmt = gimple_build_assign (new_tree,
240		                               fold_convert (TREE_TYPE (new_tree),
241					       		    integer_zero_node));
242	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
243	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
244	    }
245	  else
246	    {
247	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
248	      if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
249		  == name)
250	        set_default_def (SSA_NAME_VAR (new_tree), new_tree);
251	    }
252	}
253    }
254  else
255    insert_decl_map (id, name, new_tree);
256  return new_tree;
257}
258
259/* Remap DECL during the copying of the BLOCK tree for the function.  */
260
261tree
262remap_decl (tree decl, copy_body_data *id)
263{
264  tree *n;
265
266  /* We only remap local variables in the current function.  */
267
268  /* See if we have remapped this declaration.  */
269
270  n = (tree *) pointer_map_contains (id->decl_map, decl);
271
272  if (!n && processing_debug_stmt)
273    {
274      processing_debug_stmt = -1;
275      return decl;
276    }
277
278  /* If we didn't already have an equivalent for this declaration,
279     create one now.  */
280  if (!n)
281    {
282      /* Make a copy of the variable or label.  */
283      tree t = id->copy_decl (decl, id);
284
285      /* Remember it, so that if we encounter this local entity again
286	 we can reuse this copy.  Do this early because remap_type may
287	 need this decl for TYPE_STUB_DECL.  */
288      insert_decl_map (id, decl, t);
289
290      if (!DECL_P (t))
291	return t;
292
293      /* Remap types, if necessary.  */
294      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
295      if (TREE_CODE (t) == TYPE_DECL)
296        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
297
298      /* Remap sizes as necessary.  */
299      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
300      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
301
302      /* If fields, do likewise for offset and qualifier.  */
303      if (TREE_CODE (t) == FIELD_DECL)
304	{
305	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
306	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
307	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
308	}
309
310      if (cfun && gimple_in_ssa_p (cfun)
311	  && (TREE_CODE (t) == VAR_DECL
312	      || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
313	{
314	  get_var_ann (t);
315	  add_referenced_var (t);
316	}
317      return t;
318    }
319
320  if (id->do_not_unshare)
321    return *n;
322  else
323    return unshare_expr (*n);
324}
325
326static tree
327remap_type_1 (tree type, copy_body_data *id)
328{
329  tree new_tree, t;
330
331  /* We do need a copy.  build and register it now.  If this is a pointer or
332     reference type, remap the designated type and make a new pointer or
333     reference type.  */
334  if (TREE_CODE (type) == POINTER_TYPE)
335    {
336      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
337					 TYPE_MODE (type),
338					 TYPE_REF_CAN_ALIAS_ALL (type));
339      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
340	new_tree = build_type_attribute_qual_variant (new_tree,
341						      TYPE_ATTRIBUTES (type),
342						      TYPE_QUALS (type));
343      insert_decl_map (id, type, new_tree);
344      return new_tree;
345    }
346  else if (TREE_CODE (type) == REFERENCE_TYPE)
347    {
348      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
349					    TYPE_MODE (type),
350					    TYPE_REF_CAN_ALIAS_ALL (type));
351      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
352	new_tree = build_type_attribute_qual_variant (new_tree,
353						      TYPE_ATTRIBUTES (type),
354						      TYPE_QUALS (type));
355      insert_decl_map (id, type, new_tree);
356      return new_tree;
357    }
358  else
359    new_tree = copy_node (type);
360
361  insert_decl_map (id, type, new_tree);
362
363  /* This is a new type, not a copy of an old type.  Need to reassociate
364     variants.  We can handle everything except the main variant lazily.  */
365  t = TYPE_MAIN_VARIANT (type);
366  if (type != t)
367    {
368      t = remap_type (t, id);
369      TYPE_MAIN_VARIANT (new_tree) = t;
370      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
371      TYPE_NEXT_VARIANT (t) = new_tree;
372    }
373  else
374    {
375      TYPE_MAIN_VARIANT (new_tree) = new_tree;
376      TYPE_NEXT_VARIANT (new_tree) = NULL;
377    }
378
379  if (TYPE_STUB_DECL (type))
380    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
381
382  /* Lazily create pointer and reference types.  */
383  TYPE_POINTER_TO (new_tree) = NULL;
384  TYPE_REFERENCE_TO (new_tree) = NULL;
385
386  switch (TREE_CODE (new_tree))
387    {
388    case INTEGER_TYPE:
389    case REAL_TYPE:
390    case FIXED_POINT_TYPE:
391    case ENUMERAL_TYPE:
392    case BOOLEAN_TYPE:
393      t = TYPE_MIN_VALUE (new_tree);
394      if (t && TREE_CODE (t) != INTEGER_CST)
395        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
396
397      t = TYPE_MAX_VALUE (new_tree);
398      if (t && TREE_CODE (t) != INTEGER_CST)
399        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
400      return new_tree;
401
402    case FUNCTION_TYPE:
403      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
404      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
405      return new_tree;
406
407    case ARRAY_TYPE:
408      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
409      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
410      break;
411
412    case RECORD_TYPE:
413    case UNION_TYPE:
414    case QUAL_UNION_TYPE:
415      {
416	tree f, nf = NULL;
417
418	for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
419	  {
420	    t = remap_decl (f, id);
421	    DECL_CONTEXT (t) = new_tree;
422	    TREE_CHAIN (t) = nf;
423	    nf = t;
424	  }
425	TYPE_FIELDS (new_tree) = nreverse (nf);
426      }
427      break;
428
429    case OFFSET_TYPE:
430    default:
431      /* Shouldn't have been thought variable sized.  */
432      gcc_unreachable ();
433    }
434
435  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
436  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
437
438  return new_tree;
439}
440
441tree
442remap_type (tree type, copy_body_data *id)
443{
444  tree *node;
445  tree tmp;
446
447  if (type == NULL)
448    return type;
449
450  /* See if we have remapped this type.  */
451  node = (tree *) pointer_map_contains (id->decl_map, type);
452  if (node)
453    return *node;
454
455  /* The type only needs remapping if it's variably modified.  */
456  if (! variably_modified_type_p (type, id->src_fn))
457    {
458      insert_decl_map (id, type, type);
459      return type;
460    }
461
462  id->remapping_type_depth++;
463  tmp = remap_type_1 (type, id);
464  id->remapping_type_depth--;
465
466  return tmp;
467}
468
469/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
470   is NULL or TYPE has not been remapped before.  */
471
472static tree
473remapped_type (tree type, copy_body_data *id)
474{
475  tree *node;
476
477  if (type == NULL)
478    return type;
479
480  /* See if we have remapped this type.  */
481  node = (tree *) pointer_map_contains (id->decl_map, type);
482  if (node)
483    return *node;
484  else
485    return NULL;
486}
487
488  /* The type only needs remapping if it's variably modified.  */
489/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */
490
491static bool
492can_be_nonlocal (tree decl, copy_body_data *id)
493{
494  /* We can not duplicate function decls.  */
495  if (TREE_CODE (decl) == FUNCTION_DECL)
496    return true;
497
498  /* Local static vars must be non-local or we get multiple declaration
499     problems.  */
500  if (TREE_CODE (decl) == VAR_DECL
501      && !auto_var_in_fn_p (decl, id->src_fn))
502    return true;
503
504  /* At the moment dwarf2out can handle only these types of nodes.  We
505     can support more later.  */
506  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
507    return false;
508
509  /* We must use global type.  We call remapped_type instead of
510     remap_type since we don't want to remap this type here if it
511     hasn't been remapped before.  */
512  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
513    return false;
514
515  /* Wihtout SSA we can't tell if variable is used.  */
516  if (!gimple_in_ssa_p (cfun))
517    return false;
518
519  /* Live variables must be copied so we can attach DECL_RTL.  */
520  if (var_ann (decl))
521    return false;
522
523  return true;
524}
525
526static tree
527remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
528{
529  tree old_var;
530  tree new_decls = NULL_TREE;
531
532  /* Remap its variables.  */
533  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
534    {
535      tree new_var;
536
537      if (can_be_nonlocal (old_var, id))
538	{
539	  if (TREE_CODE (old_var) == VAR_DECL
540	      && ! DECL_EXTERNAL (old_var)
541	      && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
542	    cfun->local_decls = tree_cons (NULL_TREE, old_var,
543						   cfun->local_decls);
544	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
545	      && !DECL_IGNORED_P (old_var)
546	      && nonlocalized_list)
547	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
548	  continue;
549	}
550
551      /* Remap the variable.  */
552      new_var = remap_decl (old_var, id);
553
554      /* If we didn't remap this variable, we can't mess with its
555	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
556	 already declared somewhere else, so don't declare it here.  */
557
558      if (new_var == id->retvar)
559	;
560      else if (!new_var)
561        {
562	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
563	      && !DECL_IGNORED_P (old_var)
564	      && nonlocalized_list)
565	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
566	}
567      else
568	{
569	  gcc_assert (DECL_P (new_var));
570	  TREE_CHAIN (new_var) = new_decls;
571	  new_decls = new_var;
572	}
573    }
574
575  return nreverse (new_decls);
576}
577
578/* Copy the BLOCK to contain remapped versions of the variables
579   therein.  And hook the new block into the block-tree.  */
580
581static void
582remap_block (tree *block, copy_body_data *id)
583{
584  tree old_block;
585  tree new_block;
586
587  /* Make the new block.  */
588  old_block = *block;
589  new_block = make_node (BLOCK);
590  TREE_USED (new_block) = TREE_USED (old_block);
591  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
592  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
593  BLOCK_NONLOCALIZED_VARS (new_block)
594    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
595  *block = new_block;
596
597  /* Remap its variables.  */
598  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
599  					&BLOCK_NONLOCALIZED_VARS (new_block),
600					id);
601
602  if (id->transform_lang_insert_block)
603    id->transform_lang_insert_block (new_block);
604
605  /* Remember the remapped block.  */
606  insert_decl_map (id, old_block, new_block);
607}
608
609/* Copy the whole block tree and root it in id->block.  */
610static tree
611remap_blocks (tree block, copy_body_data *id)
612{
613  tree t;
614  tree new_tree = block;
615
616  if (!block)
617    return NULL;
618
619  remap_block (&new_tree, id);
620  gcc_assert (new_tree != block);
621  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
622    prepend_lexical_block (new_tree, remap_blocks (t, id));
623  /* Blocks are in arbitrary order, but make things slightly prettier and do
624     not swap order when producing a copy.  */
625  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
626  return new_tree;
627}
628
629static void
630copy_statement_list (tree *tp)
631{
632  tree_stmt_iterator oi, ni;
633  tree new_tree;
634
635  new_tree = alloc_stmt_list ();
636  ni = tsi_start (new_tree);
637  oi = tsi_start (*tp);
638  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
639  *tp = new_tree;
640
641  for (; !tsi_end_p (oi); tsi_next (&oi))
642    {
643      tree stmt = tsi_stmt (oi);
644      if (TREE_CODE (stmt) == STATEMENT_LIST)
645	copy_statement_list (&stmt);
646      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
647    }
648}
649
650static void
651copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
652{
653  tree block = BIND_EXPR_BLOCK (*tp);
654  tree t;
655  /* Copy (and replace) the statement.  */
656  copy_tree_r (tp, walk_subtrees, NULL);
657  if (block)
658    {
659      remap_block (&block, id);
660      BIND_EXPR_BLOCK (*tp) = block;
661    }
662
663  if (BIND_EXPR_VARS (*tp))
664    {
665      /* This will remap a lot of the same decls again, but this should be
666	 harmless.  */
667      BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
668
669      /* Also copy value-expressions.  */
670      for (t = BIND_EXPR_VARS (*tp); t; t = TREE_CHAIN (t))
671	if (TREE_CODE (t) == VAR_DECL
672	    && DECL_HAS_VALUE_EXPR_P (t))
673	  {
674	    tree tem = DECL_VALUE_EXPR (t);
675	    walk_tree (&tem, copy_tree_body_r, id, NULL);
676	    SET_DECL_VALUE_EXPR (t, tem);
677	  }
678    }
679}
680
681
682/* Create a new gimple_seq by remapping all the statements in BODY
683   using the inlining information in ID.  */
684
685gimple_seq
686remap_gimple_seq (gimple_seq body, copy_body_data *id)
687{
688  gimple_stmt_iterator si;
689  gimple_seq new_body = NULL;
690
691  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
692    {
693      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
694      gimple_seq_add_stmt (&new_body, new_stmt);
695    }
696
697  return new_body;
698}
699
700
701/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
702   block using the mapping information in ID.  */
703
704static gimple
705copy_gimple_bind (gimple stmt, copy_body_data *id)
706{
707  gimple new_bind;
708  tree new_block, new_vars;
709  gimple_seq body, new_body;
710
711  /* Copy the statement.  Note that we purposely don't use copy_stmt
712     here because we need to remap statements as we copy.  */
713  body = gimple_bind_body (stmt);
714  new_body = remap_gimple_seq (body, id);
715
716  new_block = gimple_bind_block (stmt);
717  if (new_block)
718    remap_block (&new_block, id);
719
720  /* This will remap a lot of the same decls again, but this should be
721     harmless.  */
722  new_vars = gimple_bind_vars (stmt);
723  if (new_vars)
724    new_vars = remap_decls (new_vars, NULL, id);
725
726  new_bind = gimple_build_bind (new_vars, new_body, new_block);
727
728  return new_bind;
729}
730
731
732/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
733   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
734   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
735   recursing into the children nodes of *TP.  */
736
737static tree
738remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
739{
740  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
741  copy_body_data *id = (copy_body_data *) wi_p->info;
742  tree fn = id->src_fn;
743
744  if (TREE_CODE (*tp) == SSA_NAME)
745    {
746      *tp = remap_ssa_name (*tp, id);
747      *walk_subtrees = 0;
748      return NULL;
749    }
750  else if (auto_var_in_fn_p (*tp, fn))
751    {
752      /* Local variables and labels need to be replaced by equivalent
753	 variables.  We don't want to copy static variables; there's
754	 only one of those, no matter how many times we inline the
755	 containing function.  Similarly for globals from an outer
756	 function.  */
757      tree new_decl;
758
759      /* Remap the declaration.  */
760      new_decl = remap_decl (*tp, id);
761      gcc_assert (new_decl);
762      /* Replace this variable with the copy.  */
763      STRIP_TYPE_NOPS (new_decl);
764      /* ???  The C++ frontend uses void * pointer zero to initialize
765         any other type.  This confuses the middle-end type verification.
766	 As cloned bodies do not go through gimplification again the fixup
767	 there doesn't trigger.  */
768      if (TREE_CODE (new_decl) == INTEGER_CST
769	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
770	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
771      *tp = new_decl;
772      *walk_subtrees = 0;
773    }
774  else if (TREE_CODE (*tp) == STATEMENT_LIST)
775    gcc_unreachable ();
776  else if (TREE_CODE (*tp) == SAVE_EXPR)
777    gcc_unreachable ();
778  else if (TREE_CODE (*tp) == LABEL_DECL
779	   && (!DECL_CONTEXT (*tp)
780	       || decl_function_context (*tp) == id->src_fn))
781    /* These may need to be remapped for EH handling.  */
782    *tp = remap_decl (*tp, id);
783  else if (TYPE_P (*tp))
784    /* Types may need remapping as well.  */
785    *tp = remap_type (*tp, id);
786  else if (CONSTANT_CLASS_P (*tp))
787    {
788      /* If this is a constant, we have to copy the node iff the type
789	 will be remapped.  copy_tree_r will not copy a constant.  */
790      tree new_type = remap_type (TREE_TYPE (*tp), id);
791
792      if (new_type == TREE_TYPE (*tp))
793	*walk_subtrees = 0;
794
795      else if (TREE_CODE (*tp) == INTEGER_CST)
796	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
797				  TREE_INT_CST_HIGH (*tp));
798      else
799	{
800	  *tp = copy_node (*tp);
801	  TREE_TYPE (*tp) = new_type;
802	}
803    }
804  else
805    {
806      /* Otherwise, just copy the node.  Note that copy_tree_r already
807	 knows not to copy VAR_DECLs, etc., so this is safe.  */
808      if (TREE_CODE (*tp) == INDIRECT_REF)
809	{
810	  /* Get rid of *& from inline substitutions that can happen when a
811	     pointer argument is an ADDR_EXPR.  */
812	  tree decl = TREE_OPERAND (*tp, 0);
813	  tree *n;
814
815	  n = (tree *) pointer_map_contains (id->decl_map, decl);
816	  if (n)
817	    {
818	      tree type, new_tree, old;
819
820	      /* If we happen to get an ADDR_EXPR in n->value, strip
821		 it manually here as we'll eventually get ADDR_EXPRs
822		 which lie about their types pointed to.  In this case
823		 build_fold_indirect_ref wouldn't strip the
824		 INDIRECT_REF, but we absolutely rely on that.  As
825		 fold_indirect_ref does other useful transformations,
826		 try that first, though.  */
827	      type = TREE_TYPE (TREE_TYPE (*n));
828	      new_tree = unshare_expr (*n);
829	      old = *tp;
830	      *tp = gimple_fold_indirect_ref (new_tree);
831	      if (!*tp)
832	        {
833		  if (TREE_CODE (new_tree) == ADDR_EXPR)
834		    {
835		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
836						 type, new_tree);
837		      /* ???  We should either assert here or build
838			 a VIEW_CONVERT_EXPR instead of blindly leaking
839			 incompatible types to our IL.  */
840		      if (! *tp)
841			*tp = TREE_OPERAND (new_tree, 0);
842		    }
843	          else
844		    {
845	              *tp = build1 (INDIRECT_REF, type, new_tree);
846		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
847		      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
848		    }
849		}
850	      *walk_subtrees = 0;
851	      return NULL;
852	    }
853	}
854
855      /* Here is the "usual case".  Copy this tree node, and then
856	 tweak some special cases.  */
857      copy_tree_r (tp, walk_subtrees, NULL);
858
859      /* Global variables we haven't seen yet need to go into referenced
860	 vars.  If not referenced from types only.  */
861      if (gimple_in_ssa_p (cfun)
862	  && TREE_CODE (*tp) == VAR_DECL
863	  && id->remapping_type_depth == 0
864	  && !processing_debug_stmt)
865	add_referenced_var (*tp);
866
867      /* We should never have TREE_BLOCK set on non-statements.  */
868      if (EXPR_P (*tp))
869	gcc_assert (!TREE_BLOCK (*tp));
870
871      if (TREE_CODE (*tp) != OMP_CLAUSE)
872	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
873
874      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
875	{
876	  /* The copied TARGET_EXPR has never been expanded, even if the
877	     original node was expanded already.  */
878	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
879	  TREE_OPERAND (*tp, 3) = NULL_TREE;
880	}
881      else if (TREE_CODE (*tp) == ADDR_EXPR)
882	{
883	  /* Variable substitution need not be simple.  In particular,
884	     the INDIRECT_REF substitution above.  Make sure that
885	     TREE_CONSTANT and friends are up-to-date.  But make sure
886	     to not improperly set TREE_BLOCK on some sub-expressions.  */
887	  int invariant = is_gimple_min_invariant (*tp);
888	  tree block = id->block;
889	  id->block = NULL_TREE;
890	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
891	  id->block = block;
892
893	  /* Handle the case where we substituted an INDIRECT_REF
894	     into the operand of the ADDR_EXPR.  */
895	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
896	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
897	  else
898	    recompute_tree_invariant_for_addr_expr (*tp);
899
900	  /* If this used to be invariant, but is not any longer,
901	     then regimplification is probably needed.  */
902	  if (invariant && !is_gimple_min_invariant (*tp))
903	    id->regimplify = true;
904
905	  *walk_subtrees = 0;
906	}
907    }
908
909  /* Keep iterating.  */
910  return NULL_TREE;
911}
912
913
914/* Called from copy_body_id via walk_tree.  DATA is really a
915   `copy_body_data *'.  */
916
917tree
918copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
919{
920  copy_body_data *id = (copy_body_data *) data;
921  tree fn = id->src_fn;
922  tree new_block;
923
924  /* Begin by recognizing trees that we'll completely rewrite for the
925     inlining context.  Our output for these trees is completely
926     different from out input (e.g. RETURN_EXPR is deleted, and morphs
927     into an edge).  Further down, we'll handle trees that get
928     duplicated and/or tweaked.  */
929
930  /* When requested, RETURN_EXPRs should be transformed to just the
931     contained MODIFY_EXPR.  The branch semantics of the return will
932     be handled elsewhere by manipulating the CFG rather than a statement.  */
933  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
934    {
935      tree assignment = TREE_OPERAND (*tp, 0);
936
937      /* If we're returning something, just turn that into an
938	 assignment into the equivalent of the original RESULT_DECL.
939	 If the "assignment" is just the result decl, the result
940	 decl has already been set (e.g. a recent "foo (&result_decl,
941	 ...)"); just toss the entire RETURN_EXPR.  */
942      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
943	{
944	  /* Replace the RETURN_EXPR with (a copy of) the
945	     MODIFY_EXPR hanging underneath.  */
946	  *tp = copy_node (assignment);
947	}
948      else /* Else the RETURN_EXPR returns no value.  */
949	{
950	  *tp = NULL;
951	  return (tree) (void *)1;
952	}
953    }
954  else if (TREE_CODE (*tp) == SSA_NAME)
955    {
956      *tp = remap_ssa_name (*tp, id);
957      *walk_subtrees = 0;
958      return NULL;
959    }
960
961  /* Local variables and labels need to be replaced by equivalent
962     variables.  We don't want to copy static variables; there's only
963     one of those, no matter how many times we inline the containing
964     function.  Similarly for globals from an outer function.  */
965  else if (auto_var_in_fn_p (*tp, fn))
966    {
967      tree new_decl;
968
969      /* Remap the declaration.  */
970      new_decl = remap_decl (*tp, id);
971      gcc_assert (new_decl);
972      /* Replace this variable with the copy.  */
973      STRIP_TYPE_NOPS (new_decl);
974      *tp = new_decl;
975      *walk_subtrees = 0;
976    }
977  else if (TREE_CODE (*tp) == STATEMENT_LIST)
978    copy_statement_list (tp);
979  else if (TREE_CODE (*tp) == SAVE_EXPR
980	   || TREE_CODE (*tp) == TARGET_EXPR)
981    remap_save_expr (tp, id->decl_map, walk_subtrees);
982  else if (TREE_CODE (*tp) == LABEL_DECL
983	   && (! DECL_CONTEXT (*tp)
984	       || decl_function_context (*tp) == id->src_fn))
985    /* These may need to be remapped for EH handling.  */
986    *tp = remap_decl (*tp, id);
987  else if (TREE_CODE (*tp) == BIND_EXPR)
988    copy_bind_expr (tp, walk_subtrees, id);
989  /* Types may need remapping as well.  */
990  else if (TYPE_P (*tp))
991    *tp = remap_type (*tp, id);
992
993  /* If this is a constant, we have to copy the node iff the type will be
994     remapped.  copy_tree_r will not copy a constant.  */
995  else if (CONSTANT_CLASS_P (*tp))
996    {
997      tree new_type = remap_type (TREE_TYPE (*tp), id);
998
999      if (new_type == TREE_TYPE (*tp))
1000	*walk_subtrees = 0;
1001
1002      else if (TREE_CODE (*tp) == INTEGER_CST)
1003	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1004				  TREE_INT_CST_HIGH (*tp));
1005      else
1006	{
1007	  *tp = copy_node (*tp);
1008	  TREE_TYPE (*tp) = new_type;
1009	}
1010    }
1011
1012  /* Otherwise, just copy the node.  Note that copy_tree_r already
1013     knows not to copy VAR_DECLs, etc., so this is safe.  */
1014  else
1015    {
1016      /* Here we handle trees that are not completely rewritten.
1017	 First we detect some inlining-induced bogosities for
1018	 discarding.  */
1019      if (TREE_CODE (*tp) == MODIFY_EXPR
1020	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1021	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1022	{
1023	  /* Some assignments VAR = VAR; don't generate any rtl code
1024	     and thus don't count as variable modification.  Avoid
1025	     keeping bogosities like 0 = 0.  */
1026	  tree decl = TREE_OPERAND (*tp, 0), value;
1027	  tree *n;
1028
1029	  n = (tree *) pointer_map_contains (id->decl_map, decl);
1030	  if (n)
1031	    {
1032	      value = *n;
1033	      STRIP_TYPE_NOPS (value);
1034	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1035		{
1036		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1037		  return copy_tree_body_r (tp, walk_subtrees, data);
1038		}
1039	    }
1040	}
1041      else if (TREE_CODE (*tp) == INDIRECT_REF)
1042	{
1043	  /* Get rid of *& from inline substitutions that can happen when a
1044	     pointer argument is an ADDR_EXPR.  */
1045	  tree decl = TREE_OPERAND (*tp, 0);
1046	  tree *n;
1047
1048	  n = (tree *) pointer_map_contains (id->decl_map, decl);
1049	  if (n)
1050	    {
1051	      tree new_tree;
1052	      tree old;
1053	      /* If we happen to get an ADDR_EXPR in n->value, strip
1054	         it manually here as we'll eventually get ADDR_EXPRs
1055		 which lie about their types pointed to.  In this case
1056		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1057		 but we absolutely rely on that.  As fold_indirect_ref
1058	         does other useful transformations, try that first, though.  */
1059	      tree type = TREE_TYPE (TREE_TYPE (*n));
1060	      if (id->do_not_unshare)
1061		new_tree = *n;
1062	      else
1063		new_tree = unshare_expr (*n);
1064	      old = *tp;
1065	      *tp = gimple_fold_indirect_ref (new_tree);
1066	      if (! *tp)
1067	        {
1068		  if (TREE_CODE (new_tree) == ADDR_EXPR)
1069		    {
1070		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1071						 type, new_tree);
1072		      /* ???  We should either assert here or build
1073			 a VIEW_CONVERT_EXPR instead of blindly leaking
1074			 incompatible types to our IL.  */
1075		      if (! *tp)
1076			*tp = TREE_OPERAND (new_tree, 0);
1077		    }
1078	          else
1079		    {
1080	              *tp = build1 (INDIRECT_REF, type, new_tree);
1081		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1082		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1083		    }
1084		}
1085	      *walk_subtrees = 0;
1086	      return NULL;
1087	    }
1088	}
1089
1090      /* Here is the "usual case".  Copy this tree node, and then
1091	 tweak some special cases.  */
1092      copy_tree_r (tp, walk_subtrees, NULL);
1093
1094      /* Global variables we haven't seen yet needs to go into referenced
1095	 vars.  If not referenced from types or debug stmts only.  */
1096      if (gimple_in_ssa_p (cfun)
1097	  && TREE_CODE (*tp) == VAR_DECL
1098	  && id->remapping_type_depth == 0
1099	  && !processing_debug_stmt)
1100	add_referenced_var (*tp);
1101
1102      /* If EXPR has block defined, map it to newly constructed block.
1103         When inlining we want EXPRs without block appear in the block
1104	 of function call if we are not remapping a type.  */
1105      if (EXPR_P (*tp))
1106	{
1107	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1108	  if (TREE_BLOCK (*tp))
1109	    {
1110	      tree *n;
1111	      n = (tree *) pointer_map_contains (id->decl_map,
1112						 TREE_BLOCK (*tp));
1113	      gcc_assert (n);
1114	      new_block = *n;
1115	    }
1116	  TREE_BLOCK (*tp) = new_block;
1117	}
1118
1119      if (TREE_CODE (*tp) != OMP_CLAUSE)
1120	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1121
1122      /* The copied TARGET_EXPR has never been expanded, even if the
1123	 original node was expanded already.  */
1124      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1125	{
1126	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1127	  TREE_OPERAND (*tp, 3) = NULL_TREE;
1128	}
1129
1130      /* Variable substitution need not be simple.  In particular, the
1131	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1132	 and friends are up-to-date.  */
1133      else if (TREE_CODE (*tp) == ADDR_EXPR)
1134	{
1135	  int invariant = is_gimple_min_invariant (*tp);
1136	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1137
1138	  /* Handle the case where we substituted an INDIRECT_REF
1139	     into the operand of the ADDR_EXPR.  */
1140	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1141	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1142	  else
1143	    recompute_tree_invariant_for_addr_expr (*tp);
1144
1145	  /* If this used to be invariant, but is not any longer,
1146	     then regimplification is probably needed.  */
1147	  if (invariant && !is_gimple_min_invariant (*tp))
1148	    id->regimplify = true;
1149
1150	  *walk_subtrees = 0;
1151	}
1152    }
1153
1154  /* Keep iterating.  */
1155  return NULL_TREE;
1156}
1157
1158/* Helper for remap_gimple_stmt.  Given an EH region number for the
1159   source function, map that to the duplicate EH region number in
1160   the destination function.  */
1161
1162static int
1163remap_eh_region_nr (int old_nr, copy_body_data *id)
1164{
1165  eh_region old_r, new_r;
1166  void **slot;
1167
1168  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1169  slot = pointer_map_contains (id->eh_map, old_r);
1170  new_r = (eh_region) *slot;
1171
1172  return new_r->index;
1173}
1174
1175/* Similar, but operate on INTEGER_CSTs.  */
1176
1177static tree
1178remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1179{
1180  int old_nr, new_nr;
1181
1182  old_nr = tree_low_cst (old_t_nr, 0);
1183  new_nr = remap_eh_region_nr (old_nr, id);
1184
1185  return build_int_cst (NULL, new_nr);
1186}
1187
1188/* Helper for copy_bb.  Remap statement STMT using the inlining
1189   information in ID.  Return the new statement copy.  */
1190
1191static gimple
1192remap_gimple_stmt (gimple stmt, copy_body_data *id)
1193{
1194  gimple copy = NULL;
1195  struct walk_stmt_info wi;
1196  tree new_block;
1197  bool skip_first = false;
1198
1199  /* Begin by recognizing trees that we'll completely rewrite for the
1200     inlining context.  Our output for these trees is completely
1201     different from out input (e.g. RETURN_EXPR is deleted, and morphs
1202     into an edge).  Further down, we'll handle trees that get
1203     duplicated and/or tweaked.  */
1204
1205  /* When requested, GIMPLE_RETURNs should be transformed to just the
1206     contained GIMPLE_ASSIGN.  The branch semantics of the return will
1207     be handled elsewhere by manipulating the CFG rather than the
1208     statement.  */
1209  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1210    {
1211      tree retval = gimple_return_retval (stmt);
1212
1213      /* If we're returning something, just turn that into an
1214	 assignment into the equivalent of the original RESULT_DECL.
1215	 If RETVAL is just the result decl, the result decl has
1216	 already been set (e.g. a recent "foo (&result_decl, ...)");
1217	 just toss the entire GIMPLE_RETURN.  */
1218      if (retval && TREE_CODE (retval) != RESULT_DECL)
1219        {
1220	  copy = gimple_build_assign (id->retvar, retval);
1221	  /* id->retvar is already substituted.  Skip it on later remapping.  */
1222	  skip_first = true;
1223	}
1224      else
1225	return gimple_build_nop ();
1226    }
1227  else if (gimple_has_substatements (stmt))
1228    {
1229      gimple_seq s1, s2;
1230
1231      /* When cloning bodies from the C++ front end, we will be handed bodies
1232	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
1233	 have embedded statements.  */
1234      switch (gimple_code (stmt))
1235	{
1236	case GIMPLE_BIND:
1237	  copy = copy_gimple_bind (stmt, id);
1238	  break;
1239
1240	case GIMPLE_CATCH:
1241	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1242	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1243	  break;
1244
1245	case GIMPLE_EH_FILTER:
1246	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1247	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1248	  break;
1249
1250	case GIMPLE_TRY:
1251	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1252	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1253	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1254	  break;
1255
1256	case GIMPLE_WITH_CLEANUP_EXPR:
1257	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1258	  copy = gimple_build_wce (s1);
1259	  break;
1260
1261	case GIMPLE_OMP_PARALLEL:
1262	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1263	  copy = gimple_build_omp_parallel
1264	           (s1,
1265		    gimple_omp_parallel_clauses (stmt),
1266		    gimple_omp_parallel_child_fn (stmt),
1267		    gimple_omp_parallel_data_arg (stmt));
1268	  break;
1269
1270	case GIMPLE_OMP_TASK:
1271	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1272	  copy = gimple_build_omp_task
1273	           (s1,
1274		    gimple_omp_task_clauses (stmt),
1275		    gimple_omp_task_child_fn (stmt),
1276		    gimple_omp_task_data_arg (stmt),
1277		    gimple_omp_task_copy_fn (stmt),
1278		    gimple_omp_task_arg_size (stmt),
1279		    gimple_omp_task_arg_align (stmt));
1280	  break;
1281
1282	case GIMPLE_OMP_FOR:
1283	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1284	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1285	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1286				       gimple_omp_for_collapse (stmt), s2);
1287	  {
1288	    size_t i;
1289	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1290	      {
1291		gimple_omp_for_set_index (copy, i,
1292					  gimple_omp_for_index (stmt, i));
1293		gimple_omp_for_set_initial (copy, i,
1294					    gimple_omp_for_initial (stmt, i));
1295		gimple_omp_for_set_final (copy, i,
1296					  gimple_omp_for_final (stmt, i));
1297		gimple_omp_for_set_incr (copy, i,
1298					 gimple_omp_for_incr (stmt, i));
1299		gimple_omp_for_set_cond (copy, i,
1300					 gimple_omp_for_cond (stmt, i));
1301	      }
1302	  }
1303	  break;
1304
1305	case GIMPLE_OMP_MASTER:
1306	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1307	  copy = gimple_build_omp_master (s1);
1308	  break;
1309
1310	case GIMPLE_OMP_ORDERED:
1311	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1312	  copy = gimple_build_omp_ordered (s1);
1313	  break;
1314
1315	case GIMPLE_OMP_SECTION:
1316	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1317	  copy = gimple_build_omp_section (s1);
1318	  break;
1319
1320	case GIMPLE_OMP_SECTIONS:
1321	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1322	  copy = gimple_build_omp_sections
1323	           (s1, gimple_omp_sections_clauses (stmt));
1324	  break;
1325
1326	case GIMPLE_OMP_SINGLE:
1327	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1328	  copy = gimple_build_omp_single
1329	           (s1, gimple_omp_single_clauses (stmt));
1330	  break;
1331
1332	case GIMPLE_OMP_CRITICAL:
1333	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1334	  copy
1335	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1336	  break;
1337
1338	default:
1339	  gcc_unreachable ();
1340	}
1341    }
1342  else
1343    {
1344      if (gimple_assign_copy_p (stmt)
1345	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1346	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1347	{
1348	  /* Here we handle statements that are not completely rewritten.
1349	     First we detect some inlining-induced bogosities for
1350	     discarding.  */
1351
1352	  /* Some assignments VAR = VAR; don't generate any rtl code
1353	     and thus don't count as variable modification.  Avoid
1354	     keeping bogosities like 0 = 0.  */
1355	  tree decl = gimple_assign_lhs (stmt), value;
1356	  tree *n;
1357
1358	  n = (tree *) pointer_map_contains (id->decl_map, decl);
1359	  if (n)
1360	    {
1361	      value = *n;
1362	      STRIP_TYPE_NOPS (value);
1363	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
1364		return gimple_build_nop ();
1365	    }
1366	}
1367
1368      if (gimple_debug_bind_p (stmt))
1369	{
1370	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1371					  gimple_debug_bind_get_value (stmt),
1372					  stmt);
1373	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1374	  return copy;
1375	}
1376
1377      /* Create a new deep copy of the statement.  */
1378      copy = gimple_copy (stmt);
1379
1380      /* Remap the region numbers for __builtin_eh_{pointer,filter},
1381	 RESX and EH_DISPATCH.  */
1382      if (id->eh_map)
1383	switch (gimple_code (copy))
1384	  {
1385	  case GIMPLE_CALL:
1386	    {
1387	      tree r, fndecl = gimple_call_fndecl (copy);
1388	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1389		switch (DECL_FUNCTION_CODE (fndecl))
1390		  {
1391		  case BUILT_IN_EH_COPY_VALUES:
1392		    r = gimple_call_arg (copy, 1);
1393		    r = remap_eh_region_tree_nr (r, id);
1394		    gimple_call_set_arg (copy, 1, r);
1395		    /* FALLTHRU */
1396
1397		  case BUILT_IN_EH_POINTER:
1398		  case BUILT_IN_EH_FILTER:
1399		    r = gimple_call_arg (copy, 0);
1400		    r = remap_eh_region_tree_nr (r, id);
1401		    gimple_call_set_arg (copy, 0, r);
1402		    break;
1403
1404		  default:
1405		    break;
1406		  }
1407	    }
1408	    break;
1409
1410	  case GIMPLE_RESX:
1411	    {
1412	      int r = gimple_resx_region (copy);
1413	      r = remap_eh_region_nr (r, id);
1414	      gimple_resx_set_region (copy, r);
1415	    }
1416	    break;
1417
1418	  case GIMPLE_EH_DISPATCH:
1419	    {
1420	      int r = gimple_eh_dispatch_region (copy);
1421	      r = remap_eh_region_nr (r, id);
1422	      gimple_eh_dispatch_set_region (copy, r);
1423	    }
1424	    break;
1425
1426	  default:
1427	    break;
1428	  }
1429    }
1430
1431  /* If STMT has a block defined, map it to the newly constructed
1432     block.  When inlining we want statements without a block to
1433     appear in the block of the function call.  */
1434  new_block = id->block;
1435  if (gimple_block (copy))
1436    {
1437      tree *n;
1438      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1439      gcc_assert (n);
1440      new_block = *n;
1441    }
1442
1443  gimple_set_block (copy, new_block);
1444
1445  if (gimple_debug_bind_p (copy))
1446    return copy;
1447
1448  /* Remap all the operands in COPY.  */
1449  memset (&wi, 0, sizeof (wi));
1450  wi.info = id;
1451  if (skip_first)
1452    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1453  else
1454    walk_gimple_op (copy, remap_gimple_op_r, &wi);
1455
1456  /* Clear the copied virtual operands.  We are not remapping them here
1457     but are going to recreate them from scratch.  */
1458  if (gimple_has_mem_ops (copy))
1459    {
1460      gimple_set_vdef (copy, NULL_TREE);
1461      gimple_set_vuse (copy, NULL_TREE);
1462    }
1463
1464  return copy;
1465}
1466
1467
1468/* Copy basic block, scale profile accordingly.  Edges will be taken care of
1469   later  */
1470
1471static basic_block
1472copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1473         gcov_type count_scale)
1474{
1475  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1476  basic_block copy_basic_block;
1477  tree decl;
1478  gcov_type freq;
1479
1480  /* create_basic_block() will append every new block to
1481     basic_block_info automatically.  */
1482  copy_basic_block = create_basic_block (NULL, (void *) 0,
1483                                         (basic_block) bb->prev_bb->aux);
1484  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1485
1486  /* We are going to rebuild frequencies from scratch.  These values
1487     have just small importance to drive canonicalize_loop_headers.  */
1488  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1489
1490  /* We recompute frequencies after inlining, so this is quite safe.  */
1491  if (freq > BB_FREQ_MAX)
1492    freq = BB_FREQ_MAX;
1493  copy_basic_block->frequency = freq;
1494
1495  copy_gsi = gsi_start_bb (copy_basic_block);
1496
1497  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1498    {
1499      gimple stmt = gsi_stmt (gsi);
1500      gimple orig_stmt = stmt;
1501
1502      id->regimplify = false;
1503      stmt = remap_gimple_stmt (stmt, id);
1504      if (gimple_nop_p (stmt))
1505	continue;
1506
1507      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1508      seq_gsi = copy_gsi;
1509
1510      /* With return slot optimization we can end up with
1511	 non-gimple (foo *)&this->m, fix that here.  */
1512      if (is_gimple_assign (stmt)
1513	  && gimple_assign_rhs_code (stmt) == NOP_EXPR
1514	  && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1515	{
1516	  tree new_rhs;
1517	  new_rhs = force_gimple_operand_gsi (&seq_gsi,
1518					      gimple_assign_rhs1 (stmt),
1519					      true, NULL, false,
1520					      GSI_CONTINUE_LINKING);
1521	  gimple_assign_set_rhs1 (stmt, new_rhs);
1522	  id->regimplify = false;
1523	}
1524
1525      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1526
1527      if (id->regimplify)
1528	gimple_regimplify_operands (stmt, &seq_gsi);
1529
1530      /* If copy_basic_block has been empty at the start of this iteration,
1531	 call gsi_start_bb again to get at the newly added statements.  */
1532      if (gsi_end_p (copy_gsi))
1533	copy_gsi = gsi_start_bb (copy_basic_block);
1534      else
1535	gsi_next (&copy_gsi);
1536
1537      /* Process the new statement.  The call to gimple_regimplify_operands
1538	 possibly turned the statement into multiple statements, we
1539	 need to process all of them.  */
1540      do
1541	{
1542	  tree fn;
1543
1544	  stmt = gsi_stmt (copy_gsi);
1545	  if (is_gimple_call (stmt)
1546	      && gimple_call_va_arg_pack_p (stmt)
1547	      && id->gimple_call)
1548	    {
1549	      /* __builtin_va_arg_pack () should be replaced by
1550		 all arguments corresponding to ... in the caller.  */
1551	      tree p;
1552	      gimple new_call;
1553	      VEC(tree, heap) *argarray;
1554	      size_t nargs = gimple_call_num_args (id->gimple_call);
1555	      size_t n;
1556
1557	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1558		nargs--;
1559
1560	      /* Create the new array of arguments.  */
1561	      n = nargs + gimple_call_num_args (stmt);
1562	      argarray = VEC_alloc (tree, heap, n);
1563	      VEC_safe_grow (tree, heap, argarray, n);
1564
1565	      /* Copy all the arguments before '...'  */
1566	      memcpy (VEC_address (tree, argarray),
1567		      gimple_call_arg_ptr (stmt, 0),
1568		      gimple_call_num_args (stmt) * sizeof (tree));
1569
1570	      /* Append the arguments passed in '...'  */
1571	      memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1572		      gimple_call_arg_ptr (id->gimple_call, 0)
1573			+ (gimple_call_num_args (id->gimple_call) - nargs),
1574		      nargs * sizeof (tree));
1575
1576	      new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1577						argarray);
1578
1579	      VEC_free (tree, heap, argarray);
1580
1581	      /* Copy all GIMPLE_CALL flags, location and block, except
1582		 GF_CALL_VA_ARG_PACK.  */
1583	      gimple_call_copy_flags (new_call, stmt);
1584	      gimple_call_set_va_arg_pack (new_call, false);
1585	      gimple_set_location (new_call, gimple_location (stmt));
1586	      gimple_set_block (new_call, gimple_block (stmt));
1587	      gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1588
1589	      gsi_replace (&copy_gsi, new_call, false);
1590	      stmt = new_call;
1591	    }
1592	  else if (is_gimple_call (stmt)
1593		   && id->gimple_call
1594		   && (decl = gimple_call_fndecl (stmt))
1595		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1596		   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1597	    {
1598	      /* __builtin_va_arg_pack_len () should be replaced by
1599		 the number of anonymous arguments.  */
1600	      size_t nargs = gimple_call_num_args (id->gimple_call);
1601	      tree count, p;
1602	      gimple new_stmt;
1603
1604	      for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1605		nargs--;
1606
1607	      count = build_int_cst (integer_type_node, nargs);
1608	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1609	      gsi_replace (&copy_gsi, new_stmt, false);
1610	      stmt = new_stmt;
1611	    }
1612
1613	  /* Statements produced by inlining can be unfolded, especially
1614	     when we constant propagated some operands.  We can't fold
1615	     them right now for two reasons:
1616	     1) folding require SSA_NAME_DEF_STMTs to be correct
1617	     2) we can't change function calls to builtins.
1618	     So we just mark statement for later folding.  We mark
1619	     all new statements, instead just statements that has changed
1620	     by some nontrivial substitution so even statements made
1621	     foldable indirectly are updated.  If this turns out to be
1622	     expensive, copy_body can be told to watch for nontrivial
1623	     changes.  */
1624	  if (id->statements_to_fold)
1625	    pointer_set_insert (id->statements_to_fold, stmt);
1626
1627	  /* We're duplicating a CALL_EXPR.  Find any corresponding
1628	     callgraph edges and update or duplicate them.  */
1629	  if (is_gimple_call (stmt))
1630	    {
1631	      struct cgraph_edge *edge;
1632	      int flags;
1633
1634	      switch (id->transform_call_graph_edges)
1635		{
1636		case CB_CGE_DUPLICATE:
1637		  edge = cgraph_edge (id->src_node, orig_stmt);
1638		  if (edge)
1639		    {
1640		      int edge_freq = edge->frequency;
1641		      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1642					        gimple_uid (stmt),
1643					        REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1644					        edge->frequency, true);
1645		      /* We could also just rescale the frequency, but
1646		         doing so would introduce roundoff errors and make
1647			 verifier unhappy.  */
1648		      edge->frequency
1649		        = compute_call_stmt_bb_frequency (id->dst_node->decl,
1650							  copy_basic_block);
1651		      if (dump_file
1652		      	  && profile_status_for_function (cfun) != PROFILE_ABSENT
1653			  && (edge_freq > edge->frequency + 10
1654			      || edge_freq < edge->frequency - 10))
1655			{
1656			  fprintf (dump_file, "Edge frequency estimated by "
1657			           "cgraph %i diverge from inliner's estimate %i\n",
1658			  	   edge_freq,
1659				   edge->frequency);
1660			  fprintf (dump_file,
1661			  	   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1662				   bb->index,
1663				   bb->frequency,
1664				   copy_basic_block->frequency);
1665			}
1666		      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1667		    }
1668		  break;
1669
1670		case CB_CGE_MOVE_CLONES:
1671		  cgraph_set_call_stmt_including_clones (id->dst_node,
1672							 orig_stmt, stmt);
1673		  edge = cgraph_edge (id->dst_node, stmt);
1674		  break;
1675
1676		case CB_CGE_MOVE:
1677		  edge = cgraph_edge (id->dst_node, orig_stmt);
1678		  if (edge)
1679		    cgraph_set_call_stmt (edge, stmt);
1680		  break;
1681
1682		default:
1683		  gcc_unreachable ();
1684		}
1685
1686	      /* Constant propagation on argument done during inlining
1687		 may create new direct call.  Produce an edge for it.  */
1688	      if ((!edge
1689		   || (edge->indirect_call
1690		       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1691		  && is_gimple_call (stmt)
1692		  && (fn = gimple_call_fndecl (stmt)) != NULL)
1693		{
1694		  struct cgraph_node *dest = cgraph_node (fn);
1695
1696		  /* We have missing edge in the callgraph.  This can happen
1697		     when previous inlining turned an indirect call into a
1698		     direct call by constant propagating arguments or we are
1699		     producing dead clone (for further clonning).  In all
1700		     other cases we hit a bug (incorrect node sharing is the
1701		     most common reason for missing edges).  */
1702		  gcc_assert (dest->needed || !dest->analyzed
1703		  	      || !id->src_node->analyzed);
1704		  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1705		    cgraph_create_edge_including_clones
1706		      (id->dst_node, dest, orig_stmt, stmt, bb->count,
1707		       compute_call_stmt_bb_frequency (id->dst_node->decl,
1708		       				       copy_basic_block),
1709		       bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1710		  else
1711		    cgraph_create_edge (id->dst_node, dest, stmt,
1712					bb->count,
1713					compute_call_stmt_bb_frequency
1714					  (id->dst_node->decl, copy_basic_block),
1715					bb->loop_depth)->inline_failed
1716		      = CIF_ORIGINALLY_INDIRECT_CALL;
1717		  if (dump_file)
1718		    {
1719		      fprintf (dump_file, "Created new direct edge to %s",
1720			       cgraph_node_name (dest));
1721		    }
1722		}
1723
1724	      flags = gimple_call_flags (stmt);
1725	      if (flags & ECF_MAY_BE_ALLOCA)
1726		cfun->calls_alloca = true;
1727	      if (flags & ECF_RETURNS_TWICE)
1728		cfun->calls_setjmp = true;
1729	    }
1730
1731	  maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1732				      id->eh_map, id->eh_lp_nr);
1733
1734	  if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1735	    {
1736	      ssa_op_iter i;
1737	      tree def;
1738
1739	      find_new_referenced_vars (gsi_stmt (copy_gsi));
1740	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1741		if (TREE_CODE (def) == SSA_NAME)
1742		  SSA_NAME_DEF_STMT (def) = stmt;
1743	    }
1744
1745	  gsi_next (&copy_gsi);
1746	}
1747      while (!gsi_end_p (copy_gsi));
1748
1749      copy_gsi = gsi_last_bb (copy_basic_block);
1750    }
1751
1752  return copy_basic_block;
1753}
1754
1755/* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1756   form is quite easy, since dominator relationship for old basic blocks does
1757   not change.
1758
1759   There is however exception where inlining might change dominator relation
1760   across EH edges from basic block within inlined functions destinating
1761   to landing pads in function we inline into.
1762
1763   The function fills in PHI_RESULTs of such PHI nodes if they refer
1764   to gimple regs.  Otherwise, the function mark PHI_RESULT of such
1765   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
1766   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1767   set, and this means that there will be no overlapping live ranges
1768   for the underlying symbol.
1769
1770   This might change in future if we allow redirecting of EH edges and
1771   we might want to change way build CFG pre-inlining to include
1772   all the possible edges then.  */
1773static void
1774update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1775				  bool can_throw, bool nonlocal_goto)
1776{
1777  edge e;
1778  edge_iterator ei;
1779
1780  FOR_EACH_EDGE (e, ei, bb->succs)
1781    if (!e->dest->aux
1782	|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1783      {
1784	gimple phi;
1785	gimple_stmt_iterator si;
1786
1787	if (!nonlocal_goto)
1788	  gcc_assert (e->flags & EDGE_EH);
1789
1790	if (!can_throw)
1791	  gcc_assert (!(e->flags & EDGE_EH));
1792
1793	for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1794	  {
1795	    edge re;
1796
1797	    phi = gsi_stmt (si);
1798
1799	    /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
1800	    gcc_assert (!e->dest->aux);
1801
1802	    gcc_assert ((e->flags & EDGE_EH)
1803			|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1804
1805	    if (!is_gimple_reg (PHI_RESULT (phi)))
1806	      {
1807		mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1808		continue;
1809	      }
1810
1811	    re = find_edge (ret_bb, e->dest);
1812	    gcc_assert (re);
1813	    gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1814			== (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1815
1816	    SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1817		     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1818	  }
1819      }
1820}
1821
1822
1823/* Copy edges from BB into its copy constructed earlier, scale profile
1824   accordingly.  Edges will be taken care of later.  Assume aux
1825   pointers to point to the copies of each BB.  Return true if any
1826   debug stmts are left after a statement that must end the basic block.  */
1827
1828static bool
1829copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1830{
1831  basic_block new_bb = (basic_block) bb->aux;
1832  edge_iterator ei;
1833  edge old_edge;
1834  gimple_stmt_iterator si;
1835  int flags;
1836  bool need_debug_cleanup = false;
1837
1838  /* Use the indices from the original blocks to create edges for the
1839     new ones.  */
1840  FOR_EACH_EDGE (old_edge, ei, bb->succs)
1841    if (!(old_edge->flags & EDGE_EH))
1842      {
1843	edge new_edge;
1844
1845	flags = old_edge->flags;
1846
1847	/* Return edges do get a FALLTHRU flag when the get inlined.  */
1848	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1849	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
1850	  flags |= EDGE_FALLTHRU;
1851	new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1852	new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1853	new_edge->probability = old_edge->probability;
1854      }
1855
1856  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1857    return false;
1858
1859  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1860    {
1861      gimple copy_stmt;
1862      bool can_throw, nonlocal_goto;
1863
1864      copy_stmt = gsi_stmt (si);
1865      if (!is_gimple_debug (copy_stmt))
1866	{
1867	  update_stmt (copy_stmt);
1868	  if (gimple_in_ssa_p (cfun))
1869	    mark_symbols_for_renaming (copy_stmt);
1870	}
1871
1872      /* Do this before the possible split_block.  */
1873      gsi_next (&si);
1874
1875      /* If this tree could throw an exception, there are two
1876         cases where we need to add abnormal edge(s): the
1877         tree wasn't in a region and there is a "current
1878         region" in the caller; or the original tree had
1879         EH edges.  In both cases split the block after the tree,
1880         and add abnormal edge(s) as needed; we need both
1881         those from the callee and the caller.
1882         We check whether the copy can throw, because the const
1883         propagation can change an INDIRECT_REF which throws
1884         into a COMPONENT_REF which doesn't.  If the copy
1885         can throw, the original could also throw.  */
1886      can_throw = stmt_can_throw_internal (copy_stmt);
1887      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1888
1889      if (can_throw || nonlocal_goto)
1890	{
1891	  if (!gsi_end_p (si))
1892	    {
1893	      while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1894		gsi_next (&si);
1895	      if (gsi_end_p (si))
1896		need_debug_cleanup = true;
1897	    }
1898	  if (!gsi_end_p (si))
1899	    /* Note that bb's predecessor edges aren't necessarily
1900	       right at this point; split_block doesn't care.  */
1901	    {
1902	      edge e = split_block (new_bb, copy_stmt);
1903
1904	      new_bb = e->dest;
1905	      new_bb->aux = e->src->aux;
1906	      si = gsi_start_bb (new_bb);
1907	    }
1908	}
1909
1910      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1911	make_eh_dispatch_edges (copy_stmt);
1912      else if (can_throw)
1913	make_eh_edges (copy_stmt);
1914
1915      if (nonlocal_goto)
1916	make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1917
1918      if ((can_throw || nonlocal_goto)
1919	  && gimple_in_ssa_p (cfun))
1920	update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1921					  can_throw, nonlocal_goto);
1922    }
1923  return need_debug_cleanup;
1924}
1925
1926/* Copy the PHIs.  All blocks and edges are copied, some blocks
1927   was possibly split and new outgoing EH edges inserted.
1928   BB points to the block of original function and AUX pointers links
1929   the original and newly copied blocks.  */
1930
1931static void
1932copy_phis_for_bb (basic_block bb, copy_body_data *id)
1933{
1934  basic_block const new_bb = (basic_block) bb->aux;
1935  edge_iterator ei;
1936  gimple phi;
1937  gimple_stmt_iterator si;
1938  edge new_edge;
1939  bool inserted = false;
1940
1941  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1942    {
1943      tree res, new_res;
1944      gimple new_phi;
1945
1946      phi = gsi_stmt (si);
1947      res = PHI_RESULT (phi);
1948      new_res = res;
1949      if (is_gimple_reg (res))
1950	{
1951	  walk_tree (&new_res, copy_tree_body_r, id, NULL);
1952	  SSA_NAME_DEF_STMT (new_res)
1953	    = new_phi = create_phi_node (new_res, new_bb);
1954	  FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1955	    {
1956	      edge const old_edge
1957		= find_edge ((basic_block) new_edge->src->aux, bb);
1958	      tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1959	      tree new_arg = arg;
1960	      tree block = id->block;
1961	      id->block = NULL_TREE;
1962	      walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1963	      id->block = block;
1964	      gcc_assert (new_arg);
1965	      /* With return slot optimization we can end up with
1966	         non-gimple (foo *)&this->m, fix that here.  */
1967	      if (TREE_CODE (new_arg) != SSA_NAME
1968		  && TREE_CODE (new_arg) != FUNCTION_DECL
1969		  && !is_gimple_val (new_arg))
1970		{
1971		  gimple_seq stmts = NULL;
1972		  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1973		  gsi_insert_seq_on_edge (new_edge, stmts);
1974		  inserted = true;
1975		}
1976	      add_phi_arg (new_phi, new_arg, new_edge,
1977			   gimple_phi_arg_location_from_edge (phi, old_edge));
1978	    }
1979	}
1980    }
1981
1982  /* Commit the delayed edge insertions.  */
1983  if (inserted)
1984    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1985      gsi_commit_one_edge_insert (new_edge, NULL);
1986}
1987
1988
1989/* Wrapper for remap_decl so it can be used as a callback.  */
1990
1991static tree
1992remap_decl_1 (tree decl, void *data)
1993{
1994  return remap_decl (decl, (copy_body_data *) data);
1995}
1996
1997/* Build struct function and associated datastructures for the new clone
1998   NEW_FNDECL to be build.  CALLEE_FNDECL is the original */
1999
2000static void
2001initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2002{
2003  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2004  gcov_type count_scale;
2005
2006  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2007    count_scale = (REG_BR_PROB_BASE * count
2008		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2009  else
2010    count_scale = REG_BR_PROB_BASE;
2011
2012  /* Register specific tree functions.  */
2013  gimple_register_cfg_hooks ();
2014
2015  /* Get clean struct function.  */
2016  push_struct_function (new_fndecl);
2017
2018  /* We will rebuild these, so just sanity check that they are empty.  */
2019  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2020  gcc_assert (cfun->local_decls == NULL);
2021  gcc_assert (cfun->cfg == NULL);
2022  gcc_assert (cfun->decl == new_fndecl);
2023
2024  /* Copy items we preserve during clonning.  */
2025  cfun->static_chain_decl = src_cfun->static_chain_decl;
2026  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2027  cfun->function_end_locus = src_cfun->function_end_locus;
2028  cfun->curr_properties = src_cfun->curr_properties;
2029  cfun->last_verified = src_cfun->last_verified;
2030  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2031  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2032  cfun->function_frequency = src_cfun->function_frequency;
2033  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2034  cfun->stdarg = src_cfun->stdarg;
2035  cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2036  cfun->after_inlining = src_cfun->after_inlining;
2037  cfun->returns_struct = src_cfun->returns_struct;
2038  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2039  cfun->after_tree_profile = src_cfun->after_tree_profile;
2040
2041  init_empty_tree_cfg ();
2042
2043  profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2044  ENTRY_BLOCK_PTR->count =
2045    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2046     REG_BR_PROB_BASE);
2047  ENTRY_BLOCK_PTR->frequency
2048    = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2049  EXIT_BLOCK_PTR->count =
2050    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2051     REG_BR_PROB_BASE);
2052  EXIT_BLOCK_PTR->frequency =
2053    EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2054  if (src_cfun->eh)
2055    init_eh_for_function ();
2056
2057  if (src_cfun->gimple_df)
2058    {
2059      init_tree_ssa (cfun);
2060      cfun->gimple_df->in_ssa_p = true;
2061      init_ssa_operands ();
2062    }
2063  pop_cfun ();
2064}
2065
2066/* Helper function for copy_cfg_body.  Move debug stmts from the end
2067   of NEW_BB to the beginning of successor basic blocks when needed.  If the
2068   successor has multiple predecessors, reset them, otherwise keep
2069   their value.  */
2070
2071static void
2072maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2073{
2074  edge e;
2075  edge_iterator ei;
2076  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2077
2078  if (gsi_end_p (si)
2079      || gsi_one_before_end_p (si)
2080      || !(stmt_can_throw_internal (gsi_stmt (si))
2081	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2082    return;
2083
2084  FOR_EACH_EDGE (e, ei, new_bb->succs)
2085    {
2086      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2087      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2088      while (is_gimple_debug (gsi_stmt (ssi)))
2089	{
2090	  gimple stmt = gsi_stmt (ssi), new_stmt;
2091	  tree var;
2092	  tree value;
2093
2094	  /* For the last edge move the debug stmts instead of copying
2095	     them.  */
2096	  if (ei_one_before_end_p (ei))
2097	    {
2098	      si = ssi;
2099	      gsi_prev (&ssi);
2100	      if (!single_pred_p (e->dest))
2101		gimple_debug_bind_reset_value (stmt);
2102	      gsi_remove (&si, false);
2103	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2104	      continue;
2105	    }
2106
2107	  var = gimple_debug_bind_get_var (stmt);
2108	  if (single_pred_p (e->dest))
2109	    {
2110	      value = gimple_debug_bind_get_value (stmt);
2111	      value = unshare_expr (value);
2112	    }
2113	  else
2114	    value = NULL_TREE;
2115	  new_stmt = gimple_build_debug_bind (var, value, stmt);
2116	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2117	  VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2118	  gsi_prev (&ssi);
2119	}
2120    }
2121}
2122
2123/* Make a copy of the body of FN so that it can be inserted inline in
2124   another function.  Walks FN via CFG, returns new fndecl.  */
2125
2126static tree
2127copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2128	       basic_block entry_block_map, basic_block exit_block_map)
2129{
2130  tree callee_fndecl = id->src_fn;
2131  /* Original cfun for the callee, doesn't change.  */
2132  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2133  struct function *cfun_to_copy;
2134  basic_block bb;
2135  tree new_fndecl = NULL;
2136  bool need_debug_cleanup = false;
2137  gcov_type count_scale;
2138  int last;
2139
2140  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2141    count_scale = (REG_BR_PROB_BASE * count
2142		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2143  else
2144    count_scale = REG_BR_PROB_BASE;
2145
2146  /* Register specific tree functions.  */
2147  gimple_register_cfg_hooks ();
2148
2149  /* Must have a CFG here at this point.  */
2150  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2151	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
2152
2153  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2154
2155  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2156  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2157  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2158  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2159
2160  /* Duplicate any exception-handling regions.  */
2161  if (cfun->eh)
2162    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2163				       remap_decl_1, id);
2164
2165  /* Use aux pointers to map the original blocks to copy.  */
2166  FOR_EACH_BB_FN (bb, cfun_to_copy)
2167    {
2168      basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2169      bb->aux = new_bb;
2170      new_bb->aux = bb;
2171    }
2172
2173  last = last_basic_block;
2174
2175  /* Now that we've duplicated the blocks, duplicate their edges.  */
2176  FOR_ALL_BB_FN (bb, cfun_to_copy)
2177    need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2178
2179  if (gimple_in_ssa_p (cfun))
2180    FOR_ALL_BB_FN (bb, cfun_to_copy)
2181      copy_phis_for_bb (bb, id);
2182
2183  FOR_ALL_BB_FN (bb, cfun_to_copy)
2184    {
2185      if (need_debug_cleanup
2186	  && bb->index != ENTRY_BLOCK
2187	  && bb->index != EXIT_BLOCK)
2188	maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2189      ((basic_block)bb->aux)->aux = NULL;
2190      bb->aux = NULL;
2191    }
2192
2193  /* Zero out AUX fields of newly created block during EH edge
2194     insertion. */
2195  for (; last < last_basic_block; last++)
2196    {
2197      if (need_debug_cleanup)
2198	maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2199      BASIC_BLOCK (last)->aux = NULL;
2200    }
2201  entry_block_map->aux = NULL;
2202  exit_block_map->aux = NULL;
2203
2204  if (id->eh_map)
2205    {
2206      pointer_map_destroy (id->eh_map);
2207      id->eh_map = NULL;
2208    }
2209
2210  return new_fndecl;
2211}
2212
2213/* Copy the debug STMT using ID.  We deal with these statements in a
2214   special way: if any variable in their VALUE expression wasn't
2215   remapped yet, we won't remap it, because that would get decl uids
2216   out of sync, causing codegen differences between -g and -g0.  If
2217   this arises, we drop the VALUE expression altogether.  */
2218
2219static void
2220copy_debug_stmt (gimple stmt, copy_body_data *id)
2221{
2222  tree t, *n;
2223  struct walk_stmt_info wi;
2224
2225  t = id->block;
2226  if (gimple_block (stmt))
2227    {
2228      tree *n;
2229      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2230      if (n)
2231	t = *n;
2232    }
2233  gimple_set_block (stmt, t);
2234
2235  /* Remap all the operands in COPY.  */
2236  memset (&wi, 0, sizeof (wi));
2237  wi.info = id;
2238
2239  processing_debug_stmt = 1;
2240
2241  t = gimple_debug_bind_get_var (stmt);
2242
2243  if (TREE_CODE (t) == PARM_DECL && id->debug_map
2244      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2245    {
2246      gcc_assert (TREE_CODE (*n) == VAR_DECL);
2247      t = *n;
2248    }
2249  else if (TREE_CODE (t) == VAR_DECL
2250	   && !TREE_STATIC (t)
2251	   && gimple_in_ssa_p (cfun)
2252	   && !pointer_map_contains (id->decl_map, t)
2253	   && !var_ann (t))
2254    /* T is a non-localized variable.  */;
2255  else
2256    walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2257
2258  gimple_debug_bind_set_var (stmt, t);
2259
2260  if (gimple_debug_bind_has_value_p (stmt))
2261    walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2262	       remap_gimple_op_r, &wi, NULL);
2263
2264  /* Punt if any decl couldn't be remapped.  */
2265  if (processing_debug_stmt < 0)
2266    gimple_debug_bind_reset_value (stmt);
2267
2268  processing_debug_stmt = 0;
2269
2270  update_stmt (stmt);
2271  if (gimple_in_ssa_p (cfun))
2272    mark_symbols_for_renaming (stmt);
2273}
2274
2275/* Process deferred debug stmts.  In order to give values better odds
2276   of being successfully remapped, we delay the processing of debug
2277   stmts until all other stmts that might require remapping are
2278   processed.  */
2279
2280static void
2281copy_debug_stmts (copy_body_data *id)
2282{
2283  size_t i;
2284  gimple stmt;
2285
2286  if (!id->debug_stmts)
2287    return;
2288
2289  for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2290    copy_debug_stmt (stmt, id);
2291
2292  VEC_free (gimple, heap, id->debug_stmts);
2293}
2294
2295/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2296   another function.  */
2297
2298static tree
2299copy_tree_body (copy_body_data *id)
2300{
2301  tree fndecl = id->src_fn;
2302  tree body = DECL_SAVED_TREE (fndecl);
2303
2304  walk_tree (&body, copy_tree_body_r, id, NULL);
2305
2306  return body;
2307}
2308
2309/* Make a copy of the body of FN so that it can be inserted inline in
2310   another function.  */
2311
2312static tree
2313copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2314	   basic_block entry_block_map, basic_block exit_block_map)
2315{
2316  tree fndecl = id->src_fn;
2317  tree body;
2318
2319  /* If this body has a CFG, walk CFG and copy.  */
2320  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2321  body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2322  copy_debug_stmts (id);
2323
2324  return body;
2325}
2326
2327/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2328   defined in function FN, or of a data member thereof.  */
2329
2330static bool
2331self_inlining_addr_expr (tree value, tree fn)
2332{
2333  tree var;
2334
2335  if (TREE_CODE (value) != ADDR_EXPR)
2336    return false;
2337
2338  var = get_base_address (TREE_OPERAND (value, 0));
2339
2340  return var && auto_var_in_fn_p (var, fn);
2341}
2342
2343/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2344   lexical block and line number information from base_stmt, if given,
2345   or from the last stmt of the block otherwise.  */
2346
2347static gimple
2348insert_init_debug_bind (copy_body_data *id,
2349			basic_block bb, tree var, tree value,
2350			gimple base_stmt)
2351{
2352  gimple note;
2353  gimple_stmt_iterator gsi;
2354  tree tracked_var;
2355
2356  if (!gimple_in_ssa_p (id->src_cfun))
2357    return NULL;
2358
2359  if (!MAY_HAVE_DEBUG_STMTS)
2360    return NULL;
2361
2362  tracked_var = target_for_debug_bind (var);
2363  if (!tracked_var)
2364    return NULL;
2365
2366  if (bb)
2367    {
2368      gsi = gsi_last_bb (bb);
2369      if (!base_stmt && !gsi_end_p (gsi))
2370	base_stmt = gsi_stmt (gsi);
2371    }
2372
2373  note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2374
2375  if (bb)
2376    {
2377      if (!gsi_end_p (gsi))
2378	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2379      else
2380	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2381    }
2382
2383  return note;
2384}
2385
2386static void
2387insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2388{
2389  /* If VAR represents a zero-sized variable, it's possible that the
2390     assignment statement may result in no gimple statements.  */
2391  if (init_stmt)
2392    {
2393      gimple_stmt_iterator si = gsi_last_bb (bb);
2394
2395      /* We can end up with init statements that store to a non-register
2396         from a rhs with a conversion.  Handle that here by forcing the
2397	 rhs into a temporary.  gimple_regimplify_operands is not
2398	 prepared to do this for us.  */
2399      if (!is_gimple_debug (init_stmt)
2400	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2401	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2402	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2403	{
2404	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2405			     gimple_expr_type (init_stmt),
2406			     gimple_assign_rhs1 (init_stmt));
2407	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2408					  GSI_NEW_STMT);
2409	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2410	  gimple_assign_set_rhs1 (init_stmt, rhs);
2411	}
2412      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2413      gimple_regimplify_operands (init_stmt, &si);
2414      mark_symbols_for_renaming (init_stmt);
2415
2416      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2417	{
2418	  tree var, def = gimple_assign_lhs (init_stmt);
2419
2420	  if (TREE_CODE (def) == SSA_NAME)
2421	    var = SSA_NAME_VAR (def);
2422	  else
2423	    var = def;
2424
2425	  insert_init_debug_bind (id, bb, var, def, init_stmt);
2426	}
2427    }
2428}
2429
2430/* Initialize parameter P with VALUE.  If needed, produce init statement
2431   at the end of BB.  When BB is NULL, we return init statement to be
2432   output later.  */
2433static gimple
2434setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2435		     basic_block bb, tree *vars)
2436{
2437  gimple init_stmt = NULL;
2438  tree var;
2439  tree rhs = value;
2440  tree def = (gimple_in_ssa_p (cfun)
2441	      ? gimple_default_def (id->src_cfun, p) : NULL);
2442
2443  if (value
2444      && value != error_mark_node
2445      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2446    {
2447      if (fold_convertible_p (TREE_TYPE (p), value))
2448	rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2449      else
2450	/* ???  For valid (GIMPLE) programs we should not end up here.
2451	   Still if something has gone wrong and we end up with truly
2452	   mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2453	   to not leak invalid GIMPLE to the following passes.  */
2454	rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2455    }
2456
2457  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
2458     here since the type of this decl must be visible to the calling
2459     function.  */
2460  var = copy_decl_to_var (p, id);
2461
2462  /* We're actually using the newly-created var.  */
2463  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2464    {
2465      get_var_ann (var);
2466      add_referenced_var (var);
2467    }
2468
2469  /* Declare this new variable.  */
2470  TREE_CHAIN (var) = *vars;
2471  *vars = var;
2472
2473  /* Make gimplifier happy about this variable.  */
2474  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2475
2476  /* If the parameter is never assigned to, has no SSA_NAMEs created,
2477     we would not need to create a new variable here at all, if it
2478     weren't for debug info.  Still, we can just use the argument
2479     value.  */
2480  if (TREE_READONLY (p)
2481      && !TREE_ADDRESSABLE (p)
2482      && value && !TREE_SIDE_EFFECTS (value)
2483      && !def)
2484    {
2485      /* We may produce non-gimple trees by adding NOPs or introduce
2486	 invalid sharing when operand is not really constant.
2487	 It is not big deal to prohibit constant propagation here as
2488	 we will constant propagate in DOM1 pass anyway.  */
2489      if (is_gimple_min_invariant (value)
2490	  && useless_type_conversion_p (TREE_TYPE (p),
2491						 TREE_TYPE (value))
2492	  /* We have to be very careful about ADDR_EXPR.  Make sure
2493	     the base variable isn't a local variable of the inlined
2494	     function, e.g., when doing recursive inlining, direct or
2495	     mutually-recursive or whatever, which is why we don't
2496	     just test whether fn == current_function_decl.  */
2497	  && ! self_inlining_addr_expr (value, fn))
2498	{
2499	  insert_decl_map (id, p, value);
2500	  insert_debug_decl_map (id, p, var);
2501	  return insert_init_debug_bind (id, bb, var, value, NULL);
2502	}
2503    }
2504
2505  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2506     that way, when the PARM_DECL is encountered, it will be
2507     automatically replaced by the VAR_DECL.  */
2508  insert_decl_map (id, p, var);
2509
2510  /* Even if P was TREE_READONLY, the new VAR should not be.
2511     In the original code, we would have constructed a
2512     temporary, and then the function body would have never
2513     changed the value of P.  However, now, we will be
2514     constructing VAR directly.  The constructor body may
2515     change its value multiple times as it is being
2516     constructed.  Therefore, it must not be TREE_READONLY;
2517     the back-end assumes that TREE_READONLY variable is
2518     assigned to only once.  */
2519  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2520    TREE_READONLY (var) = 0;
2521
2522  /* If there is no setup required and we are in SSA, take the easy route
2523     replacing all SSA names representing the function parameter by the
2524     SSA name passed to function.
2525
2526     We need to construct map for the variable anyway as it might be used
2527     in different SSA names when parameter is set in function.
2528
2529     Do replacement at -O0 for const arguments replaced by constant.
2530     This is important for builtin_constant_p and other construct requiring
2531     constant argument to be visible in inlined function body.  */
2532  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2533      && (optimize
2534          || (TREE_READONLY (p)
2535	      && is_gimple_min_invariant (rhs)))
2536      && (TREE_CODE (rhs) == SSA_NAME
2537	  || is_gimple_min_invariant (rhs))
2538      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2539    {
2540      insert_decl_map (id, def, rhs);
2541      return insert_init_debug_bind (id, bb, var, rhs, NULL);
2542    }
2543
2544  /* If the value of argument is never used, don't care about initializing
2545     it.  */
2546  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2547    {
2548      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2549      return insert_init_debug_bind (id, bb, var, rhs, NULL);
2550    }
2551
2552  /* Initialize this VAR_DECL from the equivalent argument.  Convert
2553     the argument to the proper type in case it was promoted.  */
2554  if (value)
2555    {
2556      if (rhs == error_mark_node)
2557	{
2558	  insert_decl_map (id, p, var);
2559	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
2560	}
2561
2562      STRIP_USELESS_TYPE_CONVERSION (rhs);
2563
2564      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2565	 keep our trees in gimple form.  */
2566      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2567	{
2568	  def = remap_ssa_name (def, id);
2569          init_stmt = gimple_build_assign (def, rhs);
2570	  SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2571	  set_default_def (var, NULL);
2572	}
2573      else
2574        init_stmt = gimple_build_assign (var, rhs);
2575
2576      if (bb && init_stmt)
2577        insert_init_stmt (id, bb, init_stmt);
2578    }
2579  return init_stmt;
2580}
2581
2582/* Generate code to initialize the parameters of the function at the
2583   top of the stack in ID from the GIMPLE_CALL STMT.  */
2584
2585static void
2586initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2587			       tree fn, basic_block bb)
2588{
2589  tree parms;
2590  size_t i;
2591  tree p;
2592  tree vars = NULL_TREE;
2593  tree static_chain = gimple_call_chain (stmt);
2594
2595  /* Figure out what the parameters are.  */
2596  parms = DECL_ARGUMENTS (fn);
2597
2598  /* Loop through the parameter declarations, replacing each with an
2599     equivalent VAR_DECL, appropriately initialized.  */
2600  for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2601    {
2602      tree val;
2603      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2604      setup_one_parameter (id, p, val, fn, bb, &vars);
2605    }
2606
2607  /* Initialize the static chain.  */
2608  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2609  gcc_assert (fn != current_function_decl);
2610  if (p)
2611    {
2612      /* No static chain?  Seems like a bug in tree-nested.c.  */
2613      gcc_assert (static_chain);
2614
2615      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2616    }
2617
2618  declare_inline_vars (id->block, vars);
2619}
2620
2621
2622/* Declare a return variable to replace the RESULT_DECL for the
2623   function we are calling.  An appropriate DECL_STMT is returned.
2624   The USE_STMT is filled to contain a use of the declaration to
2625   indicate the return value of the function.
2626
2627   RETURN_SLOT, if non-null is place where to store the result.  It
2628   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
2629   was the LHS of the MODIFY_EXPR to which this call is the RHS.
2630
2631   The return value is a (possibly null) value that holds the result
2632   as seen by the caller.  */
2633
2634static tree
2635declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2636{
2637  tree callee = id->src_fn;
2638  tree caller = id->dst_fn;
2639  tree result = DECL_RESULT (callee);
2640  tree callee_type = TREE_TYPE (result);
2641  tree caller_type;
2642  tree var, use;
2643
2644  /* Handle type-mismatches in the function declaration return type
2645     vs. the call expression.  */
2646  if (modify_dest)
2647    caller_type = TREE_TYPE (modify_dest);
2648  else
2649    caller_type = TREE_TYPE (TREE_TYPE (callee));
2650
2651  /* We don't need to do anything for functions that don't return
2652     anything.  */
2653  if (!result || VOID_TYPE_P (callee_type))
2654    return NULL_TREE;
2655
2656  /* If there was a return slot, then the return value is the
2657     dereferenced address of that object.  */
2658  if (return_slot)
2659    {
2660      /* The front end shouldn't have used both return_slot and
2661	 a modify expression.  */
2662      gcc_assert (!modify_dest);
2663      if (DECL_BY_REFERENCE (result))
2664	{
2665	  tree return_slot_addr = build_fold_addr_expr (return_slot);
2666	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2667
2668	  /* We are going to construct *&return_slot and we can't do that
2669	     for variables believed to be not addressable.
2670
2671	     FIXME: This check possibly can match, because values returned
2672	     via return slot optimization are not believed to have address
2673	     taken by alias analysis.  */
2674	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2675	  if (gimple_in_ssa_p (cfun))
2676	    {
2677	      HOST_WIDE_INT bitsize;
2678	      HOST_WIDE_INT bitpos;
2679	      tree offset;
2680	      enum machine_mode mode;
2681	      int unsignedp;
2682	      int volatilep;
2683	      tree base;
2684	      base = get_inner_reference (return_slot, &bitsize, &bitpos,
2685					  &offset,
2686					  &mode, &unsignedp, &volatilep,
2687					  false);
2688	      if (TREE_CODE (base) == INDIRECT_REF)
2689		base = TREE_OPERAND (base, 0);
2690	      if (TREE_CODE (base) == SSA_NAME)
2691		base = SSA_NAME_VAR (base);
2692	      mark_sym_for_renaming (base);
2693	    }
2694	  var = return_slot_addr;
2695	}
2696      else
2697	{
2698	  var = return_slot;
2699	  gcc_assert (TREE_CODE (var) != SSA_NAME);
2700	  TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2701	}
2702      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2703           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2704	  && !DECL_GIMPLE_REG_P (result)
2705	  && DECL_P (var))
2706	DECL_GIMPLE_REG_P (var) = 0;
2707      use = NULL;
2708      goto done;
2709    }
2710
2711  /* All types requiring non-trivial constructors should have been handled.  */
2712  gcc_assert (!TREE_ADDRESSABLE (callee_type));
2713
2714  /* Attempt to avoid creating a new temporary variable.  */
2715  if (modify_dest
2716      && TREE_CODE (modify_dest) != SSA_NAME)
2717    {
2718      bool use_it = false;
2719
2720      /* We can't use MODIFY_DEST if there's type promotion involved.  */
2721      if (!useless_type_conversion_p (callee_type, caller_type))
2722	use_it = false;
2723
2724      /* ??? If we're assigning to a variable sized type, then we must
2725	 reuse the destination variable, because we've no good way to
2726	 create variable sized temporaries at this point.  */
2727      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2728	use_it = true;
2729
2730      /* If the callee cannot possibly modify MODIFY_DEST, then we can
2731	 reuse it as the result of the call directly.  Don't do this if
2732	 it would promote MODIFY_DEST to addressable.  */
2733      else if (TREE_ADDRESSABLE (result))
2734	use_it = false;
2735      else
2736	{
2737	  tree base_m = get_base_address (modify_dest);
2738
2739	  /* If the base isn't a decl, then it's a pointer, and we don't
2740	     know where that's going to go.  */
2741	  if (!DECL_P (base_m))
2742	    use_it = false;
2743	  else if (is_global_var (base_m))
2744	    use_it = false;
2745	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2746		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2747		   && !DECL_GIMPLE_REG_P (result)
2748		   && DECL_GIMPLE_REG_P (base_m))
2749	    use_it = false;
2750	  else if (!TREE_ADDRESSABLE (base_m))
2751	    use_it = true;
2752	}
2753
2754      if (use_it)
2755	{
2756	  var = modify_dest;
2757	  use = NULL;
2758	  goto done;
2759	}
2760    }
2761
2762  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2763
2764  var = copy_result_decl_to_var (result, id);
2765  if (gimple_in_ssa_p (cfun))
2766    {
2767      get_var_ann (var);
2768      add_referenced_var (var);
2769    }
2770
2771  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2772  DECL_STRUCT_FUNCTION (caller)->local_decls
2773    = tree_cons (NULL_TREE, var,
2774		 DECL_STRUCT_FUNCTION (caller)->local_decls);
2775
2776  /* Do not have the rest of GCC warn about this variable as it should
2777     not be visible to the user.  */
2778  TREE_NO_WARNING (var) = 1;
2779
2780  declare_inline_vars (id->block, var);
2781
2782  /* Build the use expr.  If the return type of the function was
2783     promoted, convert it back to the expected type.  */
2784  use = var;
2785  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2786    use = fold_convert (caller_type, var);
2787
2788  STRIP_USELESS_TYPE_CONVERSION (use);
2789
2790  if (DECL_BY_REFERENCE (result))
2791    {
2792      TREE_ADDRESSABLE (var) = 1;
2793      var = build_fold_addr_expr (var);
2794    }
2795
2796 done:
2797  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2798     way, when the RESULT_DECL is encountered, it will be
2799     automatically replaced by the VAR_DECL.  */
2800  insert_decl_map (id, result, var);
2801
2802  /* Remember this so we can ignore it in remap_decls.  */
2803  id->retvar = var;
2804
2805  return use;
2806}
2807
2808/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
2809   to a local label.  */
2810
2811static tree
2812has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2813{
2814  tree node = *nodep;
2815  tree fn = (tree) fnp;
2816
2817  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2818    return node;
2819
2820  if (TYPE_P (node))
2821    *walk_subtrees = 0;
2822
2823  return NULL_TREE;
2824}
2825
2826/* Determine if the function can be copied.  If so return NULL.  If
2827   not return a string describng the reason for failure.  */
2828
2829static const char *
2830copy_forbidden (struct function *fun, tree fndecl)
2831{
2832  const char *reason = fun->cannot_be_copied_reason;
2833  tree step;
2834
2835  /* Only examine the function once.  */
2836  if (fun->cannot_be_copied_set)
2837    return reason;
2838
2839  /* We cannot copy a function that receives a non-local goto
2840     because we cannot remap the destination label used in the
2841     function that is performing the non-local goto.  */
2842  /* ??? Actually, this should be possible, if we work at it.
2843     No doubt there's just a handful of places that simply
2844     assume it doesn't happen and don't substitute properly.  */
2845  if (fun->has_nonlocal_label)
2846    {
2847      reason = G_("function %q+F can never be copied "
2848		  "because it receives a non-local goto");
2849      goto fail;
2850    }
2851
2852  for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2853    {
2854      tree decl = TREE_VALUE (step);
2855
2856      if (TREE_CODE (decl) == VAR_DECL
2857	  && TREE_STATIC (decl)
2858	  && !DECL_EXTERNAL (decl)
2859	  && DECL_INITIAL (decl)
2860	  && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2861					   has_label_address_in_static_1,
2862					   fndecl))
2863	{
2864	  reason = G_("function %q+F can never be copied because it saves "
2865		      "address of local label in a static variable");
2866	  goto fail;
2867	}
2868    }
2869
2870 fail:
2871  fun->cannot_be_copied_reason = reason;
2872  fun->cannot_be_copied_set = true;
2873  return reason;
2874}
2875
2876
2877static const char *inline_forbidden_reason;
2878
2879/* A callback for walk_gimple_seq to handle statements.  Returns non-null
2880   iff a function can not be inlined.  Also sets the reason why. */
2881
2882static tree
2883inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2884			 struct walk_stmt_info *wip)
2885{
2886  tree fn = (tree) wip->info;
2887  tree t;
2888  gimple stmt = gsi_stmt (*gsi);
2889
2890  switch (gimple_code (stmt))
2891    {
2892    case GIMPLE_CALL:
2893      /* Refuse to inline alloca call unless user explicitly forced so as
2894	 this may change program's memory overhead drastically when the
2895	 function using alloca is called in loop.  In GCC present in
2896	 SPEC2000 inlining into schedule_block cause it to require 2GB of
2897	 RAM instead of 256MB.  */
2898      if (gimple_alloca_call_p (stmt)
2899	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2900	{
2901	  inline_forbidden_reason
2902	    = G_("function %q+F can never be inlined because it uses "
2903		 "alloca (override using the always_inline attribute)");
2904	  *handled_ops_p = true;
2905	  return fn;
2906	}
2907
2908      t = gimple_call_fndecl (stmt);
2909      if (t == NULL_TREE)
2910	break;
2911
2912      /* We cannot inline functions that call setjmp.  */
2913      if (setjmp_call_p (t))
2914	{
2915	  inline_forbidden_reason
2916	    = G_("function %q+F can never be inlined because it uses setjmp");
2917	  *handled_ops_p = true;
2918	  return t;
2919	}
2920
2921      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2922	switch (DECL_FUNCTION_CODE (t))
2923	  {
2924	    /* We cannot inline functions that take a variable number of
2925	       arguments.  */
2926	  case BUILT_IN_VA_START:
2927	  case BUILT_IN_NEXT_ARG:
2928	  case BUILT_IN_VA_END:
2929	    inline_forbidden_reason
2930	      = G_("function %q+F can never be inlined because it "
2931		   "uses variable argument lists");
2932	    *handled_ops_p = true;
2933	    return t;
2934
2935	  case BUILT_IN_LONGJMP:
2936	    /* We can't inline functions that call __builtin_longjmp at
2937	       all.  The non-local goto machinery really requires the
2938	       destination be in a different function.  If we allow the
2939	       function calling __builtin_longjmp to be inlined into the
2940	       function calling __builtin_setjmp, Things will Go Awry.  */
2941	    inline_forbidden_reason
2942	      = G_("function %q+F can never be inlined because "
2943		   "it uses setjmp-longjmp exception handling");
2944	    *handled_ops_p = true;
2945	    return t;
2946
2947	  case BUILT_IN_NONLOCAL_GOTO:
2948	    /* Similarly.  */
2949	    inline_forbidden_reason
2950	      = G_("function %q+F can never be inlined because "
2951		   "it uses non-local goto");
2952	    *handled_ops_p = true;
2953	    return t;
2954
2955	  case BUILT_IN_RETURN:
2956	  case BUILT_IN_APPLY_ARGS:
2957	    /* If a __builtin_apply_args caller would be inlined,
2958	       it would be saving arguments of the function it has
2959	       been inlined into.  Similarly __builtin_return would
2960	       return from the function the inline has been inlined into.  */
2961	    inline_forbidden_reason
2962	      = G_("function %q+F can never be inlined because "
2963		   "it uses __builtin_return or __builtin_apply_args");
2964	    *handled_ops_p = true;
2965	    return t;
2966
2967	  default:
2968	    break;
2969	  }
2970      break;
2971
2972    case GIMPLE_GOTO:
2973      t = gimple_goto_dest (stmt);
2974
2975      /* We will not inline a function which uses computed goto.  The
2976	 addresses of its local labels, which may be tucked into
2977	 global storage, are of course not constant across
2978	 instantiations, which causes unexpected behavior.  */
2979      if (TREE_CODE (t) != LABEL_DECL)
2980	{
2981	  inline_forbidden_reason
2982	    = G_("function %q+F can never be inlined "
2983		 "because it contains a computed goto");
2984	  *handled_ops_p = true;
2985	  return t;
2986	}
2987      break;
2988
2989    default:
2990      break;
2991    }
2992
2993  *handled_ops_p = false;
2994  return NULL_TREE;
2995}
2996
2997/* Return true if FNDECL is a function that cannot be inlined into
2998   another one.  */
2999
3000static bool
3001inline_forbidden_p (tree fndecl)
3002{
3003  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3004  struct walk_stmt_info wi;
3005  struct pointer_set_t *visited_nodes;
3006  basic_block bb;
3007  bool forbidden_p = false;
3008
3009  /* First check for shared reasons not to copy the code.  */
3010  inline_forbidden_reason = copy_forbidden (fun, fndecl);
3011  if (inline_forbidden_reason != NULL)
3012    return true;
3013
3014  /* Next, walk the statements of the function looking for
3015     constraucts we can't handle, or are non-optimal for inlining.  */
3016  visited_nodes = pointer_set_create ();
3017  memset (&wi, 0, sizeof (wi));
3018  wi.info = (void *) fndecl;
3019  wi.pset = visited_nodes;
3020
3021  FOR_EACH_BB_FN (bb, fun)
3022    {
3023      gimple ret;
3024      gimple_seq seq = bb_seq (bb);
3025      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3026      forbidden_p = (ret != NULL);
3027      if (forbidden_p)
3028	break;
3029    }
3030
3031  pointer_set_destroy (visited_nodes);
3032  return forbidden_p;
3033}
3034
3035/* Returns nonzero if FN is a function that does not have any
3036   fundamental inline blocking properties.  */
3037
3038bool
3039tree_inlinable_function_p (tree fn)
3040{
3041  bool inlinable = true;
3042  bool do_warning;
3043  tree always_inline;
3044
3045  /* If we've already decided this function shouldn't be inlined,
3046     there's no need to check again.  */
3047  if (DECL_UNINLINABLE (fn))
3048    return false;
3049
3050  /* We only warn for functions declared `inline' by the user.  */
3051  do_warning = (warn_inline
3052		&& DECL_DECLARED_INLINE_P (fn)
3053		&& !DECL_NO_INLINE_WARNING_P (fn)
3054		&& !DECL_IN_SYSTEM_HEADER (fn));
3055
3056  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3057
3058  if (flag_no_inline
3059      && always_inline == NULL)
3060    {
3061      if (do_warning)
3062        warning (OPT_Winline, "function %q+F can never be inlined because it "
3063                 "is suppressed using -fno-inline", fn);
3064      inlinable = false;
3065    }
3066
3067  /* Don't auto-inline anything that might not be bound within
3068     this unit of translation.  */
3069  else if (!DECL_DECLARED_INLINE_P (fn)
3070	   && decl_replaceable_p (fn))
3071    inlinable = false;
3072
3073  else if (!function_attribute_inlinable_p (fn))
3074    {
3075      if (do_warning)
3076        warning (OPT_Winline, "function %q+F can never be inlined because it "
3077                 "uses attributes conflicting with inlining", fn);
3078      inlinable = false;
3079    }
3080
3081  else if (inline_forbidden_p (fn))
3082    {
3083      /* See if we should warn about uninlinable functions.  Previously,
3084	 some of these warnings would be issued while trying to expand
3085	 the function inline, but that would cause multiple warnings
3086	 about functions that would for example call alloca.  But since
3087	 this a property of the function, just one warning is enough.
3088	 As a bonus we can now give more details about the reason why a
3089	 function is not inlinable.  */
3090      if (always_inline)
3091	sorry (inline_forbidden_reason, fn);
3092      else if (do_warning)
3093	warning (OPT_Winline, inline_forbidden_reason, fn);
3094
3095      inlinable = false;
3096    }
3097
3098  /* Squirrel away the result so that we don't have to check again.  */
3099  DECL_UNINLINABLE (fn) = !inlinable;
3100
3101  return inlinable;
3102}
3103
3104/* Estimate the cost of a memory move.  Use machine dependent
3105   word size and take possible memcpy call into account.  */
3106
3107int
3108estimate_move_cost (tree type)
3109{
3110  HOST_WIDE_INT size;
3111
3112  gcc_assert (!VOID_TYPE_P (type));
3113
3114  size = int_size_in_bytes (type);
3115
3116  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3117    /* Cost of a memcpy call, 3 arguments and the call.  */
3118    return 4;
3119  else
3120    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3121}
3122
3123/* Returns cost of operation CODE, according to WEIGHTS  */
3124
3125static int
3126estimate_operator_cost (enum tree_code code, eni_weights *weights,
3127			tree op1 ATTRIBUTE_UNUSED, tree op2)
3128{
3129  switch (code)
3130    {
3131    /* These are "free" conversions, or their presumed cost
3132       is folded into other operations.  */
3133    case RANGE_EXPR:
3134    CASE_CONVERT:
3135    case COMPLEX_EXPR:
3136    case PAREN_EXPR:
3137      return 0;
3138
3139    /* Assign cost of 1 to usual operations.
3140       ??? We may consider mapping RTL costs to this.  */
3141    case COND_EXPR:
3142    case VEC_COND_EXPR:
3143
3144    case PLUS_EXPR:
3145    case POINTER_PLUS_EXPR:
3146    case MINUS_EXPR:
3147    case MULT_EXPR:
3148
3149    case ADDR_SPACE_CONVERT_EXPR:
3150    case FIXED_CONVERT_EXPR:
3151    case FIX_TRUNC_EXPR:
3152
3153    case NEGATE_EXPR:
3154    case FLOAT_EXPR:
3155    case MIN_EXPR:
3156    case MAX_EXPR:
3157    case ABS_EXPR:
3158
3159    case LSHIFT_EXPR:
3160    case RSHIFT_EXPR:
3161    case LROTATE_EXPR:
3162    case RROTATE_EXPR:
3163    case VEC_LSHIFT_EXPR:
3164    case VEC_RSHIFT_EXPR:
3165
3166    case BIT_IOR_EXPR:
3167    case BIT_XOR_EXPR:
3168    case BIT_AND_EXPR:
3169    case BIT_NOT_EXPR:
3170
3171    case TRUTH_ANDIF_EXPR:
3172    case TRUTH_ORIF_EXPR:
3173    case TRUTH_AND_EXPR:
3174    case TRUTH_OR_EXPR:
3175    case TRUTH_XOR_EXPR:
3176    case TRUTH_NOT_EXPR:
3177
3178    case LT_EXPR:
3179    case LE_EXPR:
3180    case GT_EXPR:
3181    case GE_EXPR:
3182    case EQ_EXPR:
3183    case NE_EXPR:
3184    case ORDERED_EXPR:
3185    case UNORDERED_EXPR:
3186
3187    case UNLT_EXPR:
3188    case UNLE_EXPR:
3189    case UNGT_EXPR:
3190    case UNGE_EXPR:
3191    case UNEQ_EXPR:
3192    case LTGT_EXPR:
3193
3194    case CONJ_EXPR:
3195
3196    case PREDECREMENT_EXPR:
3197    case PREINCREMENT_EXPR:
3198    case POSTDECREMENT_EXPR:
3199    case POSTINCREMENT_EXPR:
3200
3201    case REALIGN_LOAD_EXPR:
3202
3203    case REDUC_MAX_EXPR:
3204    case REDUC_MIN_EXPR:
3205    case REDUC_PLUS_EXPR:
3206    case WIDEN_SUM_EXPR:
3207    case WIDEN_MULT_EXPR:
3208    case DOT_PROD_EXPR:
3209
3210    case VEC_WIDEN_MULT_HI_EXPR:
3211    case VEC_WIDEN_MULT_LO_EXPR:
3212    case VEC_UNPACK_HI_EXPR:
3213    case VEC_UNPACK_LO_EXPR:
3214    case VEC_UNPACK_FLOAT_HI_EXPR:
3215    case VEC_UNPACK_FLOAT_LO_EXPR:
3216    case VEC_PACK_TRUNC_EXPR:
3217    case VEC_PACK_SAT_EXPR:
3218    case VEC_PACK_FIX_TRUNC_EXPR:
3219    case VEC_EXTRACT_EVEN_EXPR:
3220    case VEC_EXTRACT_ODD_EXPR:
3221    case VEC_INTERLEAVE_HIGH_EXPR:
3222    case VEC_INTERLEAVE_LOW_EXPR:
3223
3224      return 1;
3225
3226    /* Few special cases of expensive operations.  This is useful
3227       to avoid inlining on functions having too many of these.  */
3228    case TRUNC_DIV_EXPR:
3229    case CEIL_DIV_EXPR:
3230    case FLOOR_DIV_EXPR:
3231    case ROUND_DIV_EXPR:
3232    case EXACT_DIV_EXPR:
3233    case TRUNC_MOD_EXPR:
3234    case CEIL_MOD_EXPR:
3235    case FLOOR_MOD_EXPR:
3236    case ROUND_MOD_EXPR:
3237    case RDIV_EXPR:
3238      if (TREE_CODE (op2) != INTEGER_CST)
3239        return weights->div_mod_cost;
3240      return 1;
3241
3242    default:
3243      /* We expect a copy assignment with no operator.  */
3244      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3245      return 0;
3246    }
3247}
3248
3249
3250/* Estimate number of instructions that will be created by expanding
3251   the statements in the statement sequence STMTS.
3252   WEIGHTS contains weights attributed to various constructs.  */
3253
3254static
3255int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3256{
3257  int cost;
3258  gimple_stmt_iterator gsi;
3259
3260  cost = 0;
3261  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3262    cost += estimate_num_insns (gsi_stmt (gsi), weights);
3263
3264  return cost;
3265}
3266
3267
3268/* Estimate number of instructions that will be created by expanding STMT.
3269   WEIGHTS contains weights attributed to various constructs.  */
3270
3271int
3272estimate_num_insns (gimple stmt, eni_weights *weights)
3273{
3274  unsigned cost, i;
3275  enum gimple_code code = gimple_code (stmt);
3276  tree lhs;
3277  tree rhs;
3278
3279  switch (code)
3280    {
3281    case GIMPLE_ASSIGN:
3282      /* Try to estimate the cost of assignments.  We have three cases to
3283	 deal with:
3284	 1) Simple assignments to registers;
3285	 2) Stores to things that must live in memory.  This includes
3286	    "normal" stores to scalars, but also assignments of large
3287	    structures, or constructors of big arrays;
3288
3289	 Let us look at the first two cases, assuming we have "a = b + C":
3290	 <GIMPLE_ASSIGN <var_decl "a">
3291	        <plus_expr <var_decl "b"> <constant C>>
3292	 If "a" is a GIMPLE register, the assignment to it is free on almost
3293	 any target, because "a" usually ends up in a real register.  Hence
3294	 the only cost of this expression comes from the PLUS_EXPR, and we
3295	 can ignore the GIMPLE_ASSIGN.
3296	 If "a" is not a GIMPLE register, the assignment to "a" will most
3297	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3298	 of moving something into "a", which we compute using the function
3299	 estimate_move_cost.  */
3300      lhs = gimple_assign_lhs (stmt);
3301      rhs = gimple_assign_rhs1 (stmt);
3302
3303      if (is_gimple_reg (lhs))
3304	cost = 0;
3305      else
3306	cost = estimate_move_cost (TREE_TYPE (lhs));
3307
3308      if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3309	cost += estimate_move_cost (TREE_TYPE (rhs));
3310
3311      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3312      				      gimple_assign_rhs1 (stmt),
3313				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3314				      == GIMPLE_BINARY_RHS
3315				      ? gimple_assign_rhs2 (stmt) : NULL);
3316      break;
3317
3318    case GIMPLE_COND:
3319      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3320      				         gimple_op (stmt, 0),
3321				         gimple_op (stmt, 1));
3322      break;
3323
3324    case GIMPLE_SWITCH:
3325      /* Take into account cost of the switch + guess 2 conditional jumps for
3326         each case label.
3327
3328	 TODO: once the switch expansion logic is sufficiently separated, we can
3329	 do better job on estimating cost of the switch.  */
3330      if (weights->time_based)
3331        cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3332      else
3333        cost = gimple_switch_num_labels (stmt) * 2;
3334      break;
3335
3336    case GIMPLE_CALL:
3337      {
3338	tree decl = gimple_call_fndecl (stmt);
3339	tree addr = gimple_call_fn (stmt);
3340	tree funtype = TREE_TYPE (addr);
3341
3342	if (POINTER_TYPE_P (funtype))
3343	  funtype = TREE_TYPE (funtype);
3344
3345	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3346	  cost = weights->target_builtin_call_cost;
3347	else
3348	  cost = weights->call_cost;
3349
3350	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3351	  switch (DECL_FUNCTION_CODE (decl))
3352	    {
3353	    case BUILT_IN_CONSTANT_P:
3354	      return 0;
3355	    case BUILT_IN_EXPECT:
3356	      return 0;
3357
3358	    /* Prefetch instruction is not expensive.  */
3359	    case BUILT_IN_PREFETCH:
3360	      cost = weights->target_builtin_call_cost;
3361	      break;
3362
3363	    /* Exception state returns or moves registers around.  */
3364	    case BUILT_IN_EH_FILTER:
3365	    case BUILT_IN_EH_POINTER:
3366	    case BUILT_IN_EH_COPY_VALUES:
3367	      return 0;
3368
3369	    default:
3370	      break;
3371	    }
3372
3373	if (decl)
3374	  funtype = TREE_TYPE (decl);
3375
3376	if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3377	  cost += estimate_move_cost (TREE_TYPE (funtype));
3378	/* Our cost must be kept in sync with
3379	   cgraph_estimate_size_after_inlining that does use function
3380	   declaration to figure out the arguments.  */
3381	if (decl && DECL_ARGUMENTS (decl))
3382	  {
3383	    tree arg;
3384	    for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3385	      if (!VOID_TYPE_P (TREE_TYPE (arg)))
3386	        cost += estimate_move_cost (TREE_TYPE (arg));
3387	  }
3388	else if (funtype && prototype_p (funtype))
3389	  {
3390	    tree t;
3391	    for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3392	    	 t = TREE_CHAIN (t))
3393	      if (!VOID_TYPE_P (TREE_VALUE (t)))
3394	        cost += estimate_move_cost (TREE_VALUE (t));
3395	  }
3396	else
3397	  {
3398	    for (i = 0; i < gimple_call_num_args (stmt); i++)
3399	      {
3400		tree arg = gimple_call_arg (stmt, i);
3401	        if (!VOID_TYPE_P (TREE_TYPE (arg)))
3402		  cost += estimate_move_cost (TREE_TYPE (arg));
3403	      }
3404	  }
3405
3406	break;
3407      }
3408
3409    case GIMPLE_GOTO:
3410    case GIMPLE_LABEL:
3411    case GIMPLE_NOP:
3412    case GIMPLE_PHI:
3413    case GIMPLE_RETURN:
3414    case GIMPLE_PREDICT:
3415    case GIMPLE_DEBUG:
3416      return 0;
3417
3418    case GIMPLE_ASM:
3419      return asm_str_count (gimple_asm_string (stmt));
3420
3421    case GIMPLE_RESX:
3422      /* This is either going to be an external function call with one
3423	 argument, or two register copy statements plus a goto.  */
3424      return 2;
3425
3426    case GIMPLE_EH_DISPATCH:
3427      /* ??? This is going to turn into a switch statement.  Ideally
3428	 we'd have a look at the eh region and estimate the number of
3429	 edges involved.  */
3430      return 10;
3431
3432    case GIMPLE_BIND:
3433      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3434
3435    case GIMPLE_EH_FILTER:
3436      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3437
3438    case GIMPLE_CATCH:
3439      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3440
3441    case GIMPLE_TRY:
3442      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3443              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3444
3445    /* OpenMP directives are generally very expensive.  */
3446
3447    case GIMPLE_OMP_RETURN:
3448    case GIMPLE_OMP_SECTIONS_SWITCH:
3449    case GIMPLE_OMP_ATOMIC_STORE:
3450    case GIMPLE_OMP_CONTINUE:
3451      /* ...except these, which are cheap.  */
3452      return 0;
3453
3454    case GIMPLE_OMP_ATOMIC_LOAD:
3455      return weights->omp_cost;
3456
3457    case GIMPLE_OMP_FOR:
3458      return (weights->omp_cost
3459              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3460              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3461
3462    case GIMPLE_OMP_PARALLEL:
3463    case GIMPLE_OMP_TASK:
3464    case GIMPLE_OMP_CRITICAL:
3465    case GIMPLE_OMP_MASTER:
3466    case GIMPLE_OMP_ORDERED:
3467    case GIMPLE_OMP_SECTION:
3468    case GIMPLE_OMP_SECTIONS:
3469    case GIMPLE_OMP_SINGLE:
3470      return (weights->omp_cost
3471              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3472
3473    default:
3474      gcc_unreachable ();
3475    }
3476
3477  return cost;
3478}
3479
3480/* Estimate number of instructions that will be created by expanding
3481   function FNDECL.  WEIGHTS contains weights attributed to various
3482   constructs.  */
3483
3484int
3485estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3486{
3487  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3488  gimple_stmt_iterator bsi;
3489  basic_block bb;
3490  int n = 0;
3491
3492  gcc_assert (my_function && my_function->cfg);
3493  FOR_EACH_BB_FN (bb, my_function)
3494    {
3495      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3496	n += estimate_num_insns (gsi_stmt (bsi), weights);
3497    }
3498
3499  return n;
3500}
3501
3502
3503/* Initializes weights used by estimate_num_insns.  */
3504
3505void
3506init_inline_once (void)
3507{
3508  eni_size_weights.call_cost = 1;
3509  eni_size_weights.target_builtin_call_cost = 1;
3510  eni_size_weights.div_mod_cost = 1;
3511  eni_size_weights.omp_cost = 40;
3512  eni_size_weights.time_based = false;
3513
3514  /* Estimating time for call is difficult, since we have no idea what the
3515     called function does.  In the current uses of eni_time_weights,
3516     underestimating the cost does less harm than overestimating it, so
3517     we choose a rather small value here.  */
3518  eni_time_weights.call_cost = 10;
3519  eni_time_weights.target_builtin_call_cost = 10;
3520  eni_time_weights.div_mod_cost = 10;
3521  eni_time_weights.omp_cost = 40;
3522  eni_time_weights.time_based = true;
3523}
3524
3525/* Estimate the number of instructions in a gimple_seq. */
3526
3527int
3528count_insns_seq (gimple_seq seq, eni_weights *weights)
3529{
3530  gimple_stmt_iterator gsi;
3531  int n = 0;
3532  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3533    n += estimate_num_insns (gsi_stmt (gsi), weights);
3534
3535  return n;
3536}
3537
3538
3539/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
3540
3541static void
3542prepend_lexical_block (tree current_block, tree new_block)
3543{
3544  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3545  BLOCK_SUBBLOCKS (current_block) = new_block;
3546  BLOCK_SUPERCONTEXT (new_block) = current_block;
3547}
3548
3549/* Fetch callee declaration from the call graph edge going from NODE and
3550   associated with STMR call statement.  Return NULL_TREE if not found.  */
3551static tree
3552get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3553{
3554  struct cgraph_edge *cs;
3555
3556  cs = cgraph_edge (node, stmt);
3557  if (cs)
3558    return cs->callee->decl;
3559
3560  return NULL_TREE;
3561}
3562
3563/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
3564
3565static bool
3566expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3567{
3568  tree use_retvar;
3569  tree fn;
3570  struct pointer_map_t *st, *dst;
3571  tree return_slot;
3572  tree modify_dest;
3573  location_t saved_location;
3574  struct cgraph_edge *cg_edge;
3575  cgraph_inline_failed_t reason;
3576  basic_block return_block;
3577  edge e;
3578  gimple_stmt_iterator gsi, stmt_gsi;
3579  bool successfully_inlined = FALSE;
3580  bool purge_dead_abnormal_edges;
3581  tree t_step;
3582  tree var;
3583
3584  /* Set input_location here so we get the right instantiation context
3585     if we call instantiate_decl from inlinable_function_p.  */
3586  saved_location = input_location;
3587  if (gimple_has_location (stmt))
3588    input_location = gimple_location (stmt);
3589
3590  /* From here on, we're only interested in CALL_EXPRs.  */
3591  if (gimple_code (stmt) != GIMPLE_CALL)
3592    goto egress;
3593
3594  /* First, see if we can figure out what function is being called.
3595     If we cannot, then there is no hope of inlining the function.  */
3596  fn = gimple_call_fndecl (stmt);
3597  if (!fn)
3598    {
3599      fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3600      if (!fn)
3601	goto egress;
3602    }
3603
3604  /* Turn forward declarations into real ones.  */
3605  fn = cgraph_node (fn)->decl;
3606
3607  /* If FN is a declaration of a function in a nested scope that was
3608     globally declared inline, we don't set its DECL_INITIAL.
3609     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3610     C++ front-end uses it for cdtors to refer to their internal
3611     declarations, that are not real functions.  Fortunately those
3612     don't have trees to be saved, so we can tell by checking their
3613     gimple_body.  */
3614  if (!DECL_INITIAL (fn)
3615      && DECL_ABSTRACT_ORIGIN (fn)
3616      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3617    fn = DECL_ABSTRACT_ORIGIN (fn);
3618
3619  /* Objective C and fortran still calls tree_rest_of_compilation directly.
3620     Kill this check once this is fixed.  */
3621  if (!id->dst_node->analyzed)
3622    goto egress;
3623
3624  cg_edge = cgraph_edge (id->dst_node, stmt);
3625
3626  /* Don't inline functions with different EH personalities.  */
3627  if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3628      && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3629      && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3630	  != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3631    goto egress;
3632
3633  /* Don't try to inline functions that are not well-suited to
3634     inlining.  */
3635  if (!cgraph_inline_p (cg_edge, &reason))
3636    {
3637      /* If this call was originally indirect, we do not want to emit any
3638	 inlining related warnings or sorry messages because there are no
3639	 guarantees regarding those.  */
3640      if (cg_edge->indirect_call)
3641	goto egress;
3642
3643      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3644	  /* Avoid warnings during early inline pass. */
3645	  && cgraph_global_info_ready)
3646	{
3647	  sorry ("inlining failed in call to %q+F: %s", fn,
3648		 cgraph_inline_failed_string (reason));
3649	  sorry ("called from here");
3650	}
3651      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3652	       && !DECL_IN_SYSTEM_HEADER (fn)
3653	       && reason != CIF_UNSPECIFIED
3654	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3655	       /* Avoid warnings during early inline pass. */
3656	       && cgraph_global_info_ready)
3657	{
3658	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3659		   fn, cgraph_inline_failed_string (reason));
3660	  warning (OPT_Winline, "called from here");
3661	}
3662      goto egress;
3663    }
3664  fn = cg_edge->callee->decl;
3665
3666#ifdef ENABLE_CHECKING
3667  if (cg_edge->callee->decl != id->dst_node->decl)
3668    verify_cgraph_node (cg_edge->callee);
3669#endif
3670
3671  /* We will be inlining this callee.  */
3672  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3673
3674  /* Update the callers EH personality.  */
3675  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3676    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3677      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3678
3679  /* Split the block holding the GIMPLE_CALL.  */
3680  e = split_block (bb, stmt);
3681  bb = e->src;
3682  return_block = e->dest;
3683  remove_edge (e);
3684
3685  /* split_block splits after the statement; work around this by
3686     moving the call into the second block manually.  Not pretty,
3687     but seems easier than doing the CFG manipulation by hand
3688     when the GIMPLE_CALL is in the last statement of BB.  */
3689  stmt_gsi = gsi_last_bb (bb);
3690  gsi_remove (&stmt_gsi, false);
3691
3692  /* If the GIMPLE_CALL was in the last statement of BB, it may have
3693     been the source of abnormal edges.  In this case, schedule
3694     the removal of dead abnormal edges.  */
3695  gsi = gsi_start_bb (return_block);
3696  if (gsi_end_p (gsi))
3697    {
3698      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3699      purge_dead_abnormal_edges = true;
3700    }
3701  else
3702    {
3703      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3704      purge_dead_abnormal_edges = false;
3705    }
3706
3707  stmt_gsi = gsi_start_bb (return_block);
3708
3709  /* Build a block containing code to initialize the arguments, the
3710     actual inline expansion of the body, and a label for the return
3711     statements within the function to jump to.  The type of the
3712     statement expression is the return type of the function call.  */
3713  id->block = make_node (BLOCK);
3714  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3715  BLOCK_SOURCE_LOCATION (id->block) = input_location;
3716  prepend_lexical_block (gimple_block (stmt), id->block);
3717
3718  /* Local declarations will be replaced by their equivalents in this
3719     map.  */
3720  st = id->decl_map;
3721  id->decl_map = pointer_map_create ();
3722  dst = id->debug_map;
3723  id->debug_map = NULL;
3724
3725  /* Record the function we are about to inline.  */
3726  id->src_fn = fn;
3727  id->src_node = cg_edge->callee;
3728  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3729  id->gimple_call = stmt;
3730
3731  gcc_assert (!id->src_cfun->after_inlining);
3732
3733  id->entry_bb = bb;
3734  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3735    {
3736      gimple_stmt_iterator si = gsi_last_bb (bb);
3737      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3738      						   NOT_TAKEN),
3739			GSI_NEW_STMT);
3740    }
3741  initialize_inlined_parameters (id, stmt, fn, bb);
3742
3743  if (DECL_INITIAL (fn))
3744    prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3745
3746  /* Return statements in the function body will be replaced by jumps
3747     to the RET_LABEL.  */
3748  gcc_assert (DECL_INITIAL (fn));
3749  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3750
3751  /* Find the LHS to which the result of this call is assigned.  */
3752  return_slot = NULL;
3753  if (gimple_call_lhs (stmt))
3754    {
3755      modify_dest = gimple_call_lhs (stmt);
3756
3757      /* The function which we are inlining might not return a value,
3758	 in which case we should issue a warning that the function
3759	 does not return a value.  In that case the optimizers will
3760	 see that the variable to which the value is assigned was not
3761	 initialized.  We do not want to issue a warning about that
3762	 uninitialized variable.  */
3763      if (DECL_P (modify_dest))
3764	TREE_NO_WARNING (modify_dest) = 1;
3765
3766      if (gimple_call_return_slot_opt_p (stmt))
3767	{
3768	  return_slot = modify_dest;
3769	  modify_dest = NULL;
3770	}
3771    }
3772  else
3773    modify_dest = NULL;
3774
3775  /* If we are inlining a call to the C++ operator new, we don't want
3776     to use type based alias analysis on the return value.  Otherwise
3777     we may get confused if the compiler sees that the inlined new
3778     function returns a pointer which was just deleted.  See bug
3779     33407.  */
3780  if (DECL_IS_OPERATOR_NEW (fn))
3781    {
3782      return_slot = NULL;
3783      modify_dest = NULL;
3784    }
3785
3786  /* Declare the return variable for the function.  */
3787  use_retvar = declare_return_variable (id, return_slot, modify_dest);
3788
3789  /* Add local vars in this inlined callee to caller.  */
3790  t_step = id->src_cfun->local_decls;
3791  for (; t_step; t_step = TREE_CHAIN (t_step))
3792    {
3793      var = TREE_VALUE (t_step);
3794      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3795	{
3796	  if (var_ann (var) && add_referenced_var (var))
3797	    cfun->local_decls = tree_cons (NULL_TREE, var,
3798					   cfun->local_decls);
3799	}
3800      else if (!can_be_nonlocal (var, id))
3801	cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3802				       cfun->local_decls);
3803    }
3804
3805  if (dump_file && (dump_flags & TDF_DETAILS))
3806    {
3807      fprintf (dump_file, "Inlining ");
3808      print_generic_expr (dump_file, id->src_fn, 0);
3809      fprintf (dump_file, " to ");
3810      print_generic_expr (dump_file, id->dst_fn, 0);
3811      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3812    }
3813
3814  /* This is it.  Duplicate the callee body.  Assume callee is
3815     pre-gimplified.  Note that we must not alter the caller
3816     function in any way before this point, as this CALL_EXPR may be
3817     a self-referential call; if we're calling ourselves, we need to
3818     duplicate our body before altering anything.  */
3819  copy_body (id, bb->count,
3820  	     cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3821	     bb, return_block);
3822
3823  /* Reset the escaped and callused solutions.  */
3824  if (cfun->gimple_df)
3825    {
3826      pt_solution_reset (&cfun->gimple_df->escaped);
3827      pt_solution_reset (&cfun->gimple_df->callused);
3828    }
3829
3830  /* Clean up.  */
3831  if (id->debug_map)
3832    {
3833      pointer_map_destroy (id->debug_map);
3834      id->debug_map = dst;
3835    }
3836  pointer_map_destroy (id->decl_map);
3837  id->decl_map = st;
3838
3839  /* Unlink the calls virtual operands before replacing it.  */
3840  unlink_stmt_vdef (stmt);
3841
3842  /* If the inlined function returns a result that we care about,
3843     substitute the GIMPLE_CALL with an assignment of the return
3844     variable to the LHS of the call.  That is, if STMT was
3845     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
3846  if (use_retvar && gimple_call_lhs (stmt))
3847    {
3848      gimple old_stmt = stmt;
3849      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3850      gsi_replace (&stmt_gsi, stmt, false);
3851      if (gimple_in_ssa_p (cfun))
3852	mark_symbols_for_renaming (stmt);
3853      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3854    }
3855  else
3856    {
3857      /* Handle the case of inlining a function with no return
3858	 statement, which causes the return value to become undefined.  */
3859      if (gimple_call_lhs (stmt)
3860	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3861	{
3862	  tree name = gimple_call_lhs (stmt);
3863	  tree var = SSA_NAME_VAR (name);
3864	  tree def = gimple_default_def (cfun, var);
3865
3866	  if (def)
3867	    {
3868	      /* If the variable is used undefined, make this name
3869		 undefined via a move.  */
3870	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3871	      gsi_replace (&stmt_gsi, stmt, true);
3872	    }
3873	  else
3874	    {
3875	      /* Otherwise make this variable undefined.  */
3876	      gsi_remove (&stmt_gsi, true);
3877	      set_default_def (var, name);
3878	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3879	    }
3880	}
3881      else
3882        gsi_remove (&stmt_gsi, true);
3883    }
3884
3885  if (purge_dead_abnormal_edges)
3886    gimple_purge_dead_abnormal_call_edges (return_block);
3887
3888  /* If the value of the new expression is ignored, that's OK.  We
3889     don't warn about this for CALL_EXPRs, so we shouldn't warn about
3890     the equivalent inlined version either.  */
3891  if (is_gimple_assign (stmt))
3892    {
3893      gcc_assert (gimple_assign_single_p (stmt)
3894		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3895      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3896    }
3897
3898  /* Output the inlining info for this abstract function, since it has been
3899     inlined.  If we don't do this now, we can lose the information about the
3900     variables in the function when the blocks get blown away as soon as we
3901     remove the cgraph node.  */
3902  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3903
3904  /* Update callgraph if needed.  */
3905  cgraph_remove_node (cg_edge->callee);
3906
3907  id->block = NULL_TREE;
3908  successfully_inlined = TRUE;
3909
3910 egress:
3911  input_location = saved_location;
3912  return successfully_inlined;
3913}
3914
3915/* Expand call statements reachable from STMT_P.
3916   We can only have CALL_EXPRs as the "toplevel" tree code or nested
3917   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
3918   unfortunately not use that function here because we need a pointer
3919   to the CALL_EXPR, not the tree itself.  */
3920
3921static bool
3922gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3923{
3924  gimple_stmt_iterator gsi;
3925
3926  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3927    {
3928      gimple stmt = gsi_stmt (gsi);
3929
3930      if (is_gimple_call (stmt)
3931	  && expand_call_inline (bb, stmt, id))
3932	return true;
3933    }
3934
3935  return false;
3936}
3937
3938
3939/* Walk all basic blocks created after FIRST and try to fold every statement
3940   in the STATEMENTS pointer set.  */
3941
3942static void
3943fold_marked_statements (int first, struct pointer_set_t *statements)
3944{
3945  for (; first < n_basic_blocks; first++)
3946    if (BASIC_BLOCK (first))
3947      {
3948        gimple_stmt_iterator gsi;
3949
3950	for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3951	     !gsi_end_p (gsi);
3952	     gsi_next (&gsi))
3953	  if (pointer_set_contains (statements, gsi_stmt (gsi)))
3954	    {
3955	      gimple old_stmt = gsi_stmt (gsi);
3956	      tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3957
3958	      if (old_decl && DECL_BUILT_IN (old_decl))
3959		{
3960		  /* Folding builtins can create multiple instructions,
3961		     we need to look at all of them.  */
3962		  gimple_stmt_iterator i2 = gsi;
3963		  gsi_prev (&i2);
3964		  if (fold_stmt (&gsi))
3965		    {
3966		      gimple new_stmt;
3967		      if (gsi_end_p (i2))
3968			i2 = gsi_start_bb (BASIC_BLOCK (first));
3969		      else
3970			gsi_next (&i2);
3971		      while (1)
3972			{
3973			  new_stmt = gsi_stmt (i2);
3974			  update_stmt (new_stmt);
3975			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3976							     new_stmt);
3977
3978			  if (new_stmt == gsi_stmt (gsi))
3979			    {
3980			      /* It is okay to check only for the very last
3981				 of these statements.  If it is a throwing
3982				 statement nothing will change.  If it isn't
3983				 this can remove EH edges.  If that weren't
3984				 correct then because some intermediate stmts
3985				 throw, but not the last one.  That would mean
3986				 we'd have to split the block, which we can't
3987				 here and we'd loose anyway.  And as builtins
3988				 probably never throw, this all
3989				 is mood anyway.  */
3990			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
3991								  new_stmt))
3992				gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3993			      break;
3994			    }
3995			  gsi_next (&i2);
3996			}
3997		    }
3998		}
3999	      else if (fold_stmt (&gsi))
4000		{
4001		  /* Re-read the statement from GSI as fold_stmt() may
4002		     have changed it.  */
4003		  gimple new_stmt = gsi_stmt (gsi);
4004		  update_stmt (new_stmt);
4005
4006		  if (is_gimple_call (old_stmt)
4007		      || is_gimple_call (new_stmt))
4008		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4009						       new_stmt);
4010
4011		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4012		    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4013		}
4014	    }
4015      }
4016}
4017
4018/* Return true if BB has at least one abnormal outgoing edge.  */
4019
4020static inline bool
4021has_abnormal_outgoing_edge_p (basic_block bb)
4022{
4023  edge e;
4024  edge_iterator ei;
4025
4026  FOR_EACH_EDGE (e, ei, bb->succs)
4027    if (e->flags & EDGE_ABNORMAL)
4028      return true;
4029
4030  return false;
4031}
4032
4033/* Expand calls to inline functions in the body of FN.  */
4034
4035unsigned int
4036optimize_inline_calls (tree fn)
4037{
4038  copy_body_data id;
4039  basic_block bb;
4040  int last = n_basic_blocks;
4041  struct gimplify_ctx gctx;
4042
4043  /* There is no point in performing inlining if errors have already
4044     occurred -- and we might crash if we try to inline invalid
4045     code.  */
4046  if (errorcount || sorrycount)
4047    return 0;
4048
4049  /* Clear out ID.  */
4050  memset (&id, 0, sizeof (id));
4051
4052  id.src_node = id.dst_node = cgraph_node (fn);
4053  id.dst_fn = fn;
4054  /* Or any functions that aren't finished yet.  */
4055  if (current_function_decl)
4056    id.dst_fn = current_function_decl;
4057
4058  id.copy_decl = copy_decl_maybe_to_var;
4059  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4060  id.transform_new_cfg = false;
4061  id.transform_return_to_modify = true;
4062  id.transform_lang_insert_block = NULL;
4063  id.statements_to_fold = pointer_set_create ();
4064
4065  push_gimplify_context (&gctx);
4066
4067  /* We make no attempts to keep dominance info up-to-date.  */
4068  free_dominance_info (CDI_DOMINATORS);
4069  free_dominance_info (CDI_POST_DOMINATORS);
4070
4071  /* Register specific gimple functions.  */
4072  gimple_register_cfg_hooks ();
4073
4074  /* Reach the trees by walking over the CFG, and note the
4075     enclosing basic-blocks in the call edges.  */
4076  /* We walk the blocks going forward, because inlined function bodies
4077     will split id->current_basic_block, and the new blocks will
4078     follow it; we'll trudge through them, processing their CALL_EXPRs
4079     along the way.  */
4080  FOR_EACH_BB (bb)
4081    gimple_expand_calls_inline (bb, &id);
4082
4083  pop_gimplify_context (NULL);
4084
4085#ifdef ENABLE_CHECKING
4086    {
4087      struct cgraph_edge *e;
4088
4089      verify_cgraph_node (id.dst_node);
4090
4091      /* Double check that we inlined everything we are supposed to inline.  */
4092      for (e = id.dst_node->callees; e; e = e->next_callee)
4093	gcc_assert (e->inline_failed);
4094    }
4095#endif
4096
4097  /* Fold the statements before compacting/renumbering the basic blocks.  */
4098  fold_marked_statements (last, id.statements_to_fold);
4099  pointer_set_destroy (id.statements_to_fold);
4100
4101  gcc_assert (!id.debug_stmts);
4102
4103  /* Renumber the (code) basic_blocks consecutively.  */
4104  compact_blocks ();
4105  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4106  number_blocks (fn);
4107
4108  fold_cond_expr_cond ();
4109  delete_unreachable_blocks_update_callgraph (&id);
4110#ifdef ENABLE_CHECKING
4111  verify_cgraph_node (id.dst_node);
4112#endif
4113
4114  /* It would be nice to check SSA/CFG/statement consistency here, but it is
4115     not possible yet - the IPA passes might make various functions to not
4116     throw and they don't care to proactively update local EH info.  This is
4117     done later in fixup_cfg pass that also execute the verification.  */
4118  return (TODO_update_ssa
4119	  | TODO_cleanup_cfg
4120	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4121	  | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4122}
4123
4124/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
4125
4126tree
4127copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4128{
4129  enum tree_code code = TREE_CODE (*tp);
4130  enum tree_code_class cl = TREE_CODE_CLASS (code);
4131
4132  /* We make copies of most nodes.  */
4133  if (IS_EXPR_CODE_CLASS (cl)
4134      || code == TREE_LIST
4135      || code == TREE_VEC
4136      || code == TYPE_DECL
4137      || code == OMP_CLAUSE)
4138    {
4139      /* Because the chain gets clobbered when we make a copy, we save it
4140	 here.  */
4141      tree chain = NULL_TREE, new_tree;
4142
4143      chain = TREE_CHAIN (*tp);
4144
4145      /* Copy the node.  */
4146      new_tree = copy_node (*tp);
4147
4148      /* Propagate mudflap marked-ness.  */
4149      if (flag_mudflap && mf_marked_p (*tp))
4150        mf_mark (new_tree);
4151
4152      *tp = new_tree;
4153
4154      /* Now, restore the chain, if appropriate.  That will cause
4155	 walk_tree to walk into the chain as well.  */
4156      if (code == PARM_DECL
4157	  || code == TREE_LIST
4158	  || code == OMP_CLAUSE)
4159	TREE_CHAIN (*tp) = chain;
4160
4161      /* For now, we don't update BLOCKs when we make copies.  So, we
4162	 have to nullify all BIND_EXPRs.  */
4163      if (TREE_CODE (*tp) == BIND_EXPR)
4164	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4165    }
4166  else if (code == CONSTRUCTOR)
4167    {
4168      /* CONSTRUCTOR nodes need special handling because
4169         we need to duplicate the vector of elements.  */
4170      tree new_tree;
4171
4172      new_tree = copy_node (*tp);
4173
4174      /* Propagate mudflap marked-ness.  */
4175      if (flag_mudflap && mf_marked_p (*tp))
4176        mf_mark (new_tree);
4177
4178      CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4179					 CONSTRUCTOR_ELTS (*tp));
4180      *tp = new_tree;
4181    }
4182  else if (TREE_CODE_CLASS (code) == tcc_type)
4183    *walk_subtrees = 0;
4184  else if (TREE_CODE_CLASS (code) == tcc_declaration)
4185    *walk_subtrees = 0;
4186  else if (TREE_CODE_CLASS (code) == tcc_constant)
4187    *walk_subtrees = 0;
4188  else
4189    gcc_assert (code != STATEMENT_LIST);
4190  return NULL_TREE;
4191}
4192
4193/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
4194   information indicating to what new SAVE_EXPR this one should be mapped,
4195   use that one.  Otherwise, create a new node and enter it in ST.  FN is
4196   the function into which the copy will be placed.  */
4197
4198static void
4199remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4200{
4201  struct pointer_map_t *st = (struct pointer_map_t *) st_;
4202  tree *n;
4203  tree t;
4204
4205  /* See if we already encountered this SAVE_EXPR.  */
4206  n = (tree *) pointer_map_contains (st, *tp);
4207
4208  /* If we didn't already remap this SAVE_EXPR, do so now.  */
4209  if (!n)
4210    {
4211      t = copy_node (*tp);
4212
4213      /* Remember this SAVE_EXPR.  */
4214      *pointer_map_insert (st, *tp) = t;
4215      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
4216      *pointer_map_insert (st, t) = t;
4217    }
4218  else
4219    {
4220      /* We've already walked into this SAVE_EXPR; don't do it again.  */
4221      *walk_subtrees = 0;
4222      t = *n;
4223    }
4224
4225  /* Replace this SAVE_EXPR with the copy.  */
4226  *tp = t;
4227}
4228
4229/* Called via walk_tree.  If *TP points to a DECL_STMT for a local label,
4230   copies the declaration and enters it in the splay_tree in DATA (which is
4231   really an `copy_body_data *').  */
4232
4233static tree
4234mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4235			void *data)
4236{
4237  copy_body_data *id = (copy_body_data *) data;
4238
4239  /* Don't walk into types.  */
4240  if (TYPE_P (*tp))
4241    *walk_subtrees = 0;
4242
4243  else if (TREE_CODE (*tp) == LABEL_EXPR)
4244    {
4245      tree decl = TREE_OPERAND (*tp, 0);
4246
4247      /* Copy the decl and remember the copy.  */
4248      insert_decl_map (id, decl, id->copy_decl (decl, id));
4249    }
4250
4251  return NULL_TREE;
4252}
4253
4254/* Perform any modifications to EXPR required when it is unsaved.  Does
4255   not recurse into EXPR's subtrees.  */
4256
4257static void
4258unsave_expr_1 (tree expr)
4259{
4260  switch (TREE_CODE (expr))
4261    {
4262    case TARGET_EXPR:
4263      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4264         It's OK for this to happen if it was part of a subtree that
4265         isn't immediately expanded, such as operand 2 of another
4266         TARGET_EXPR.  */
4267      if (TREE_OPERAND (expr, 1))
4268	break;
4269
4270      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4271      TREE_OPERAND (expr, 3) = NULL_TREE;
4272      break;
4273
4274    default:
4275      break;
4276    }
4277}
4278
4279/* Called via walk_tree when an expression is unsaved.  Using the
4280   splay_tree pointed to by ST (which is really a `splay_tree'),
4281   remaps all local declarations to appropriate replacements.  */
4282
4283static tree
4284unsave_r (tree *tp, int *walk_subtrees, void *data)
4285{
4286  copy_body_data *id = (copy_body_data *) data;
4287  struct pointer_map_t *st = id->decl_map;
4288  tree *n;
4289
4290  /* Only a local declaration (variable or label).  */
4291  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4292      || TREE_CODE (*tp) == LABEL_DECL)
4293    {
4294      /* Lookup the declaration.  */
4295      n = (tree *) pointer_map_contains (st, *tp);
4296
4297      /* If it's there, remap it.  */
4298      if (n)
4299	*tp = *n;
4300    }
4301
4302  else if (TREE_CODE (*tp) == STATEMENT_LIST)
4303    gcc_unreachable ();
4304  else if (TREE_CODE (*tp) == BIND_EXPR)
4305    copy_bind_expr (tp, walk_subtrees, id);
4306  else if (TREE_CODE (*tp) == SAVE_EXPR
4307	   || TREE_CODE (*tp) == TARGET_EXPR)
4308    remap_save_expr (tp, st, walk_subtrees);
4309  else
4310    {
4311      copy_tree_r (tp, walk_subtrees, NULL);
4312
4313      /* Do whatever unsaving is required.  */
4314      unsave_expr_1 (*tp);
4315    }
4316
4317  /* Keep iterating.  */
4318  return NULL_TREE;
4319}
4320
4321/* Copies everything in EXPR and replaces variables, labels
4322   and SAVE_EXPRs local to EXPR.  */
4323
4324tree
4325unsave_expr_now (tree expr)
4326{
4327  copy_body_data id;
4328
4329  /* There's nothing to do for NULL_TREE.  */
4330  if (expr == 0)
4331    return expr;
4332
4333  /* Set up ID.  */
4334  memset (&id, 0, sizeof (id));
4335  id.src_fn = current_function_decl;
4336  id.dst_fn = current_function_decl;
4337  id.decl_map = pointer_map_create ();
4338  id.debug_map = NULL;
4339
4340  id.copy_decl = copy_decl_no_change;
4341  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4342  id.transform_new_cfg = false;
4343  id.transform_return_to_modify = false;
4344  id.transform_lang_insert_block = NULL;
4345
4346  /* Walk the tree once to find local labels.  */
4347  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4348
4349  /* Walk the tree again, copying, remapping, and unsaving.  */
4350  walk_tree (&expr, unsave_r, &id, NULL);
4351
4352  /* Clean up.  */
4353  pointer_map_destroy (id.decl_map);
4354  if (id.debug_map)
4355    pointer_map_destroy (id.debug_map);
4356
4357  return expr;
4358}
4359
4360/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4361   label, copies the declaration and enters it in the splay_tree in DATA (which
4362   is really a 'copy_body_data *'.  */
4363
4364static tree
4365mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4366		        bool *handled_ops_p ATTRIBUTE_UNUSED,
4367		        struct walk_stmt_info *wi)
4368{
4369  copy_body_data *id = (copy_body_data *) wi->info;
4370  gimple stmt = gsi_stmt (*gsip);
4371
4372  if (gimple_code (stmt) == GIMPLE_LABEL)
4373    {
4374      tree decl = gimple_label_label (stmt);
4375
4376      /* Copy the decl and remember the copy.  */
4377      insert_decl_map (id, decl, id->copy_decl (decl, id));
4378    }
4379
4380  return NULL_TREE;
4381}
4382
4383
4384/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4385   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4386   remaps all local declarations to appropriate replacements in gimple
4387   operands. */
4388
4389static tree
4390replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4391{
4392  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4393  copy_body_data *id = (copy_body_data *) wi->info;
4394  struct pointer_map_t *st = id->decl_map;
4395  tree *n;
4396  tree expr = *tp;
4397
4398  /* Only a local declaration (variable or label).  */
4399  if ((TREE_CODE (expr) == VAR_DECL
4400       && !TREE_STATIC (expr))
4401      || TREE_CODE (expr) == LABEL_DECL)
4402    {
4403      /* Lookup the declaration.  */
4404      n = (tree *) pointer_map_contains (st, expr);
4405
4406      /* If it's there, remap it.  */
4407      if (n)
4408	*tp = *n;
4409      *walk_subtrees = 0;
4410    }
4411  else if (TREE_CODE (expr) == STATEMENT_LIST
4412	   || TREE_CODE (expr) == BIND_EXPR
4413	   || TREE_CODE (expr) == SAVE_EXPR)
4414    gcc_unreachable ();
4415  else if (TREE_CODE (expr) == TARGET_EXPR)
4416    {
4417      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4418         It's OK for this to happen if it was part of a subtree that
4419         isn't immediately expanded, such as operand 2 of another
4420         TARGET_EXPR.  */
4421      if (!TREE_OPERAND (expr, 1))
4422	{
4423	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4424	  TREE_OPERAND (expr, 3) = NULL_TREE;
4425	}
4426    }
4427
4428  /* Keep iterating.  */
4429  return NULL_TREE;
4430}
4431
4432
4433/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4434   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4435   remaps all local declarations to appropriate replacements in gimple
4436   statements. */
4437
4438static tree
4439replace_locals_stmt (gimple_stmt_iterator *gsip,
4440		     bool *handled_ops_p ATTRIBUTE_UNUSED,
4441		     struct walk_stmt_info *wi)
4442{
4443  copy_body_data *id = (copy_body_data *) wi->info;
4444  gimple stmt = gsi_stmt (*gsip);
4445
4446  if (gimple_code (stmt) == GIMPLE_BIND)
4447    {
4448      tree block = gimple_bind_block (stmt);
4449
4450      if (block)
4451	{
4452	  remap_block (&block, id);
4453	  gimple_bind_set_block (stmt, block);
4454	}
4455
4456      /* This will remap a lot of the same decls again, but this should be
4457	 harmless.  */
4458      if (gimple_bind_vars (stmt))
4459	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4460    }
4461
4462  /* Keep iterating.  */
4463  return NULL_TREE;
4464}
4465
4466
4467/* Copies everything in SEQ and replaces variables and labels local to
4468   current_function_decl.  */
4469
4470gimple_seq
4471copy_gimple_seq_and_replace_locals (gimple_seq seq)
4472{
4473  copy_body_data id;
4474  struct walk_stmt_info wi;
4475  struct pointer_set_t *visited;
4476  gimple_seq copy;
4477
4478  /* There's nothing to do for NULL_TREE.  */
4479  if (seq == NULL)
4480    return seq;
4481
4482  /* Set up ID.  */
4483  memset (&id, 0, sizeof (id));
4484  id.src_fn = current_function_decl;
4485  id.dst_fn = current_function_decl;
4486  id.decl_map = pointer_map_create ();
4487  id.debug_map = NULL;
4488
4489  id.copy_decl = copy_decl_no_change;
4490  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4491  id.transform_new_cfg = false;
4492  id.transform_return_to_modify = false;
4493  id.transform_lang_insert_block = NULL;
4494
4495  /* Walk the tree once to find local labels.  */
4496  memset (&wi, 0, sizeof (wi));
4497  visited = pointer_set_create ();
4498  wi.info = &id;
4499  wi.pset = visited;
4500  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4501  pointer_set_destroy (visited);
4502
4503  copy = gimple_seq_copy (seq);
4504
4505  /* Walk the copy, remapping decls.  */
4506  memset (&wi, 0, sizeof (wi));
4507  wi.info = &id;
4508  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4509
4510  /* Clean up.  */
4511  pointer_map_destroy (id.decl_map);
4512  if (id.debug_map)
4513    pointer_map_destroy (id.debug_map);
4514
4515  return copy;
4516}
4517
4518
4519/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
4520
4521static tree
4522debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4523{
4524  if (*tp == data)
4525    return (tree) data;
4526  else
4527    return NULL;
4528}
4529
4530bool
4531debug_find_tree (tree top, tree search)
4532{
4533  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4534}
4535
4536
4537/* Declare the variables created by the inliner.  Add all the variables in
4538   VARS to BIND_EXPR.  */
4539
4540static void
4541declare_inline_vars (tree block, tree vars)
4542{
4543  tree t;
4544  for (t = vars; t; t = TREE_CHAIN (t))
4545    {
4546      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4547      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4548      cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4549    }
4550
4551  if (block)
4552    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4553}
4554
4555/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
4556   but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
4557   VAR_DECL translation.  */
4558
4559static tree
4560copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4561{
4562  /* Don't generate debug information for the copy if we wouldn't have
4563     generated it for the copy either.  */
4564  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4565  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4566
4567  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4568     declaration inspired this copy.  */
4569  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4570
4571  /* The new variable/label has no RTL, yet.  */
4572  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4573      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4574    SET_DECL_RTL (copy, NULL_RTX);
4575
4576  /* These args would always appear unused, if not for this.  */
4577  TREE_USED (copy) = 1;
4578
4579  /* Set the context for the new declaration.  */
4580  if (!DECL_CONTEXT (decl))
4581    /* Globals stay global.  */
4582    ;
4583  else if (DECL_CONTEXT (decl) != id->src_fn)
4584    /* Things that weren't in the scope of the function we're inlining
4585       from aren't in the scope we're inlining to, either.  */
4586    ;
4587  else if (TREE_STATIC (decl))
4588    /* Function-scoped static variables should stay in the original
4589       function.  */
4590    ;
4591  else
4592    /* Ordinary automatic local variables are now in the scope of the
4593       new function.  */
4594    DECL_CONTEXT (copy) = id->dst_fn;
4595
4596  return copy;
4597}
4598
4599static tree
4600copy_decl_to_var (tree decl, copy_body_data *id)
4601{
4602  tree copy, type;
4603
4604  gcc_assert (TREE_CODE (decl) == PARM_DECL
4605	      || TREE_CODE (decl) == RESULT_DECL);
4606
4607  type = TREE_TYPE (decl);
4608
4609  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4610		     VAR_DECL, DECL_NAME (decl), type);
4611  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4612  TREE_READONLY (copy) = TREE_READONLY (decl);
4613  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4614  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4615
4616  return copy_decl_for_dup_finish (id, decl, copy);
4617}
4618
4619/* Like copy_decl_to_var, but create a return slot object instead of a
4620   pointer variable for return by invisible reference.  */
4621
4622static tree
4623copy_result_decl_to_var (tree decl, copy_body_data *id)
4624{
4625  tree copy, type;
4626
4627  gcc_assert (TREE_CODE (decl) == PARM_DECL
4628	      || TREE_CODE (decl) == RESULT_DECL);
4629
4630  type = TREE_TYPE (decl);
4631  if (DECL_BY_REFERENCE (decl))
4632    type = TREE_TYPE (type);
4633
4634  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4635		     VAR_DECL, DECL_NAME (decl), type);
4636  TREE_READONLY (copy) = TREE_READONLY (decl);
4637  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4638  if (!DECL_BY_REFERENCE (decl))
4639    {
4640      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4641      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4642    }
4643
4644  return copy_decl_for_dup_finish (id, decl, copy);
4645}
4646
4647tree
4648copy_decl_no_change (tree decl, copy_body_data *id)
4649{
4650  tree copy;
4651
4652  copy = copy_node (decl);
4653
4654  /* The COPY is not abstract; it will be generated in DST_FN.  */
4655  DECL_ABSTRACT (copy) = 0;
4656  lang_hooks.dup_lang_specific_decl (copy);
4657
4658  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4659     been taken; it's for internal bookkeeping in expand_goto_internal.  */
4660  if (TREE_CODE (copy) == LABEL_DECL)
4661    {
4662      TREE_ADDRESSABLE (copy) = 0;
4663      LABEL_DECL_UID (copy) = -1;
4664    }
4665
4666  return copy_decl_for_dup_finish (id, decl, copy);
4667}
4668
4669static tree
4670copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4671{
4672  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4673    return copy_decl_to_var (decl, id);
4674  else
4675    return copy_decl_no_change (decl, id);
4676}
4677
4678/* Return a copy of the function's argument tree.  */
4679static tree
4680copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4681			       bitmap args_to_skip, tree *vars)
4682{
4683  tree arg, *parg;
4684  tree new_parm = NULL;
4685  int i = 0;
4686
4687  parg = &new_parm;
4688
4689  for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4690    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4691      {
4692        tree new_tree = remap_decl (arg, id);
4693        lang_hooks.dup_lang_specific_decl (new_tree);
4694        *parg = new_tree;
4695	parg = &TREE_CHAIN (new_tree);
4696      }
4697    else if (!pointer_map_contains (id->decl_map, arg))
4698      {
4699	/* Make an equivalent VAR_DECL.  If the argument was used
4700	   as temporary variable later in function, the uses will be
4701	   replaced by local variable.  */
4702	tree var = copy_decl_to_var (arg, id);
4703	get_var_ann (var);
4704	add_referenced_var (var);
4705	insert_decl_map (id, arg, var);
4706        /* Declare this new variable.  */
4707        TREE_CHAIN (var) = *vars;
4708        *vars = var;
4709      }
4710  return new_parm;
4711}
4712
4713/* Return a copy of the function's static chain.  */
4714static tree
4715copy_static_chain (tree static_chain, copy_body_data * id)
4716{
4717  tree *chain_copy, *pvar;
4718
4719  chain_copy = &static_chain;
4720  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4721    {
4722      tree new_tree = remap_decl (*pvar, id);
4723      lang_hooks.dup_lang_specific_decl (new_tree);
4724      TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4725      *pvar = new_tree;
4726    }
4727  return static_chain;
4728}
4729
4730/* Return true if the function is allowed to be versioned.
4731   This is a guard for the versioning functionality.  */
4732
4733bool
4734tree_versionable_function_p (tree fndecl)
4735{
4736  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4737	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4738}
4739
4740/* Delete all unreachable basic blocks and update callgraph.
4741   Doing so is somewhat nontrivial because we need to update all clones and
4742   remove inline function that become unreachable.  */
4743
4744static bool
4745delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4746{
4747  bool changed = false;
4748  basic_block b, next_bb;
4749
4750  find_unreachable_blocks ();
4751
4752  /* Delete all unreachable basic blocks.  */
4753
4754  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4755    {
4756      next_bb = b->next_bb;
4757
4758      if (!(b->flags & BB_REACHABLE))
4759	{
4760          gimple_stmt_iterator bsi;
4761
4762          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4763	    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4764	      {
4765	        struct cgraph_edge *e;
4766		struct cgraph_node *node;
4767
4768	        if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4769		  {
4770		    if (!e->inline_failed)
4771		      cgraph_remove_node_and_inline_clones (e->callee);
4772		    else
4773	              cgraph_remove_edge (e);
4774		  }
4775		if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4776		    && id->dst_node->clones)
4777     		  for (node = id->dst_node->clones; node != id->dst_node;)
4778		    {
4779	              if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4780			{
4781		          if (!e->inline_failed)
4782		            cgraph_remove_node_and_inline_clones (e->callee);
4783			  else
4784	                    cgraph_remove_edge (e);
4785			}
4786
4787		      if (node->clones)
4788			node = node->clones;
4789		      else if (node->next_sibling_clone)
4790			node = node->next_sibling_clone;
4791		      else
4792			{
4793			  while (node != id->dst_node && !node->next_sibling_clone)
4794			    node = node->clone_of;
4795			  if (node != id->dst_node)
4796			    node = node->next_sibling_clone;
4797			}
4798		    }
4799	      }
4800	  delete_basic_block (b);
4801	  changed = true;
4802	}
4803    }
4804
4805  if (changed)
4806    tidy_fallthru_edges ();
4807  return changed;
4808}
4809
4810/* Update clone info after duplication.  */
4811
4812static void
4813update_clone_info (copy_body_data * id)
4814{
4815  struct cgraph_node *node;
4816  if (!id->dst_node->clones)
4817    return;
4818  for (node = id->dst_node->clones; node != id->dst_node;)
4819    {
4820      /* First update replace maps to match the new body.  */
4821      if (node->clone.tree_map)
4822        {
4823	  unsigned int i;
4824          for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4825	    {
4826	      struct ipa_replace_map *replace_info;
4827	      replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4828	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4829	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4830	    }
4831	}
4832      if (node->clones)
4833	node = node->clones;
4834      else if (node->next_sibling_clone)
4835	node = node->next_sibling_clone;
4836      else
4837	{
4838	  while (node != id->dst_node && !node->next_sibling_clone)
4839	    node = node->clone_of;
4840	  if (node != id->dst_node)
4841	    node = node->next_sibling_clone;
4842	}
4843    }
4844}
4845
4846/* Create a copy of a function's tree.
4847   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4848   of the original function and the new copied function
4849   respectively.  In case we want to replace a DECL
4850   tree with another tree while duplicating the function's
4851   body, TREE_MAP represents the mapping between these
4852   trees. If UPDATE_CLONES is set, the call_stmt fields
4853   of edges of clones of the function will be updated.  */
4854void
4855tree_function_versioning (tree old_decl, tree new_decl,
4856			  VEC(ipa_replace_map_p,gc)* tree_map,
4857			  bool update_clones, bitmap args_to_skip)
4858{
4859  struct cgraph_node *old_version_node;
4860  struct cgraph_node *new_version_node;
4861  copy_body_data id;
4862  tree p;
4863  unsigned i;
4864  struct ipa_replace_map *replace_info;
4865  basic_block old_entry_block, bb;
4866  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4867
4868  tree t_step;
4869  tree old_current_function_decl = current_function_decl;
4870  tree vars = NULL_TREE;
4871
4872  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4873	      && TREE_CODE (new_decl) == FUNCTION_DECL);
4874  DECL_POSSIBLY_INLINED (old_decl) = 1;
4875
4876  old_version_node = cgraph_node (old_decl);
4877  new_version_node = cgraph_node (new_decl);
4878
4879  /* Output the inlining info for this abstract function, since it has been
4880     inlined.  If we don't do this now, we can lose the information about the
4881     variables in the function when the blocks get blown away as soon as we
4882     remove the cgraph node.  */
4883  (*debug_hooks->outlining_inline_function) (old_decl);
4884
4885  DECL_ARTIFICIAL (new_decl) = 1;
4886  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4887  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4888
4889  /* Prepare the data structures for the tree copy.  */
4890  memset (&id, 0, sizeof (id));
4891
4892  /* Generate a new name for the new version. */
4893  id.statements_to_fold = pointer_set_create ();
4894
4895  id.decl_map = pointer_map_create ();
4896  id.debug_map = NULL;
4897  id.src_fn = old_decl;
4898  id.dst_fn = new_decl;
4899  id.src_node = old_version_node;
4900  id.dst_node = new_version_node;
4901  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4902  if (id.src_node->ipa_transforms_to_apply)
4903    {
4904      VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4905      unsigned int i;
4906
4907      id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4908					               id.src_node->ipa_transforms_to_apply);
4909      for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4910        VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4911		       VEC_index (ipa_opt_pass,
4912		       		  old_transforms_to_apply,
4913				  i));
4914    }
4915
4916  id.copy_decl = copy_decl_no_change;
4917  id.transform_call_graph_edges
4918    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4919  id.transform_new_cfg = true;
4920  id.transform_return_to_modify = false;
4921  id.transform_lang_insert_block = NULL;
4922
4923  current_function_decl = new_decl;
4924  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4925    (DECL_STRUCT_FUNCTION (old_decl));
4926  initialize_cfun (new_decl, old_decl,
4927		   old_entry_block->count);
4928  push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4929
4930  /* Copy the function's static chain.  */
4931  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4932  if (p)
4933    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4934      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4935			 &id);
4936
4937  /* If there's a tree_map, prepare for substitution.  */
4938  if (tree_map)
4939    for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4940      {
4941	gimple init;
4942	replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4943	if (replace_info->replace_p)
4944	  {
4945	    tree op = replace_info->new_tree;
4946
4947	    STRIP_NOPS (op);
4948
4949	    if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4950	      op = TREE_OPERAND (op, 0);
4951
4952	    if (TREE_CODE (op) == ADDR_EXPR)
4953	      {
4954		op = TREE_OPERAND (op, 0);
4955		while (handled_component_p (op))
4956		  op = TREE_OPERAND (op, 0);
4957		if (TREE_CODE (op) == VAR_DECL)
4958		  add_referenced_var (op);
4959	      }
4960	    gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4961	    init = setup_one_parameter (&id, replace_info->old_tree,
4962	    			        replace_info->new_tree, id.src_fn,
4963				        NULL,
4964				        &vars);
4965	    if (init)
4966	      VEC_safe_push (gimple, heap, init_stmts, init);
4967	  }
4968      }
4969  /* Copy the function's arguments.  */
4970  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4971    DECL_ARGUMENTS (new_decl) =
4972      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4973      				     args_to_skip, &vars);
4974
4975  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4976
4977  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
4978  number_blocks (id.dst_fn);
4979
4980  declare_inline_vars (DECL_INITIAL (new_decl), vars);
4981
4982  if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4983    /* Add local vars.  */
4984    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4985	 t_step; t_step = TREE_CHAIN (t_step))
4986      {
4987	tree var = TREE_VALUE (t_step);
4988	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4989	  cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4990	else if (!can_be_nonlocal (var, &id))
4991	  cfun->local_decls =
4992	    tree_cons (NULL_TREE, remap_decl (var, &id),
4993		       cfun->local_decls);
4994      }
4995
4996  /* Copy the Function's body.  */
4997  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
4998	     ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4999
5000  if (DECL_RESULT (old_decl) != NULL_TREE)
5001    {
5002      tree *res_decl = &DECL_RESULT (old_decl);
5003      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
5004      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5005    }
5006
5007  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
5008  number_blocks (new_decl);
5009
5010  /* We want to create the BB unconditionally, so that the addition of
5011     debug stmts doesn't affect BB count, which may in the end cause
5012     codegen differences.  */
5013  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5014  while (VEC_length (gimple, init_stmts))
5015    insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5016  update_clone_info (&id);
5017
5018  /* Remap the nonlocal_goto_save_area, if any.  */
5019  if (cfun->nonlocal_goto_save_area)
5020    {
5021      struct walk_stmt_info wi;
5022
5023      memset (&wi, 0, sizeof (wi));
5024      wi.info = &id;
5025      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5026    }
5027
5028  /* Clean up.  */
5029  pointer_map_destroy (id.decl_map);
5030  if (id.debug_map)
5031    pointer_map_destroy (id.debug_map);
5032  free_dominance_info (CDI_DOMINATORS);
5033  free_dominance_info (CDI_POST_DOMINATORS);
5034
5035  fold_marked_statements (0, id.statements_to_fold);
5036  pointer_set_destroy (id.statements_to_fold);
5037  fold_cond_expr_cond ();
5038  delete_unreachable_blocks_update_callgraph (&id);
5039  update_ssa (TODO_update_ssa);
5040  free_dominance_info (CDI_DOMINATORS);
5041  free_dominance_info (CDI_POST_DOMINATORS);
5042
5043  gcc_assert (!id.debug_stmts);
5044  VEC_free (gimple, heap, init_stmts);
5045  pop_cfun ();
5046  current_function_decl = old_current_function_decl;
5047  gcc_assert (!current_function_decl
5048	      || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5049  return;
5050}
5051
5052/* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5053   the callee and return the inlined body on success.  */
5054
5055tree
5056maybe_inline_call_in_expr (tree exp)
5057{
5058  tree fn = get_callee_fndecl (exp);
5059
5060  /* We can only try to inline "const" functions.  */
5061  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5062    {
5063      struct pointer_map_t *decl_map = pointer_map_create ();
5064      call_expr_arg_iterator iter;
5065      copy_body_data id;
5066      tree param, arg, t;
5067
5068      /* Remap the parameters.  */
5069      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5070	   param;
5071	   param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5072	*pointer_map_insert (decl_map, param) = arg;
5073
5074      memset (&id, 0, sizeof (id));
5075      id.src_fn = fn;
5076      id.dst_fn = current_function_decl;
5077      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5078      id.decl_map = decl_map;
5079
5080      id.copy_decl = copy_decl_no_change;
5081      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5082      id.transform_new_cfg = false;
5083      id.transform_return_to_modify = true;
5084      id.transform_lang_insert_block = false;
5085
5086      /* Make sure not to unshare trees behind the front-end's back
5087	 since front-end specific mechanisms may rely on sharing.  */
5088      id.regimplify = false;
5089      id.do_not_unshare = true;
5090
5091      /* We're not inside any EH region.  */
5092      id.eh_lp_nr = 0;
5093
5094      t = copy_tree_body (&id);
5095      pointer_map_destroy (decl_map);
5096
5097      /* We can only return something suitable for use in a GENERIC
5098	 expression tree.  */
5099      if (TREE_CODE (t) == MODIFY_EXPR)
5100	return TREE_OPERAND (t, 1);
5101    }
5102
5103   return NULL_TREE;
5104}
5105
5106/* Duplicate a type, fields and all.  */
5107
5108tree
5109build_duplicate_type (tree type)
5110{
5111  struct copy_body_data id;
5112
5113  memset (&id, 0, sizeof (id));
5114  id.src_fn = current_function_decl;
5115  id.dst_fn = current_function_decl;
5116  id.src_cfun = cfun;
5117  id.decl_map = pointer_map_create ();
5118  id.debug_map = NULL;
5119  id.copy_decl = copy_decl_no_change;
5120
5121  type = remap_type_1 (type, &id);
5122
5123  pointer_map_destroy (id.decl_map);
5124  if (id.debug_map)
5125    pointer_map_destroy (id.debug_map);
5126
5127  TYPE_CANONICAL (type) = type;
5128
5129  return type;
5130}
5131
5132/* Return whether it is safe to inline a function because it used different
5133   target specific options or call site actual types mismatch parameter types.
5134   E is the call edge to be checked.  */
5135bool
5136tree_can_inline_p (struct cgraph_edge *e)
5137{
5138#if 0
5139  /* This causes a regression in SPEC in that it prevents a cold function from
5140     inlining a hot function.  Perhaps this should only apply to functions
5141     that the user declares hot/cold/optimize explicitly.  */
5142
5143  /* Don't inline a function with a higher optimization level than the
5144     caller, or with different space constraints (hot/cold functions).  */
5145  tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5146  tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5147
5148  if (caller_tree != callee_tree)
5149    {
5150      struct cl_optimization *caller_opt
5151	= TREE_OPTIMIZATION ((caller_tree)
5152			     ? caller_tree
5153			     : optimization_default_node);
5154
5155      struct cl_optimization *callee_opt
5156	= TREE_OPTIMIZATION ((callee_tree)
5157			     ? callee_tree
5158			     : optimization_default_node);
5159
5160      if ((caller_opt->optimize > callee_opt->optimize)
5161	  || (caller_opt->optimize_size != callee_opt->optimize_size))
5162	return false;
5163    }
5164#endif
5165  tree caller, callee, lhs;
5166
5167  caller = e->caller->decl;
5168  callee = e->callee->decl;
5169
5170  /* We cannot inline a function that uses a different EH personality
5171     than the caller.  */
5172  if (DECL_FUNCTION_PERSONALITY (caller)
5173      && DECL_FUNCTION_PERSONALITY (callee)
5174      && (DECL_FUNCTION_PERSONALITY (caller)
5175	  != DECL_FUNCTION_PERSONALITY (callee)))
5176    {
5177      e->inline_failed = CIF_UNSPECIFIED;
5178      gimple_call_set_cannot_inline (e->call_stmt, true);
5179      return false;
5180    }
5181
5182  /* Allow the backend to decide if inlining is ok.  */
5183  if (!targetm.target_option.can_inline_p (caller, callee))
5184    {
5185      e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5186      gimple_call_set_cannot_inline (e->call_stmt, true);
5187      e->call_stmt_cannot_inline_p = true;
5188      return false;
5189    }
5190
5191  /* Do not inline calls where we cannot triviall work around mismatches
5192     in argument or return types.  */
5193  if (e->call_stmt
5194      && ((DECL_RESULT (callee)
5195	   && !DECL_BY_REFERENCE (DECL_RESULT (callee))
5196	   && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
5197	   && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
5198					  TREE_TYPE (lhs))
5199	   && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
5200	  || !gimple_check_call_args (e->call_stmt)))
5201    {
5202      e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5203      gimple_call_set_cannot_inline (e->call_stmt, true);
5204      e->call_stmt_cannot_inline_p = true;
5205      return false;
5206    }
5207
5208  return true;
5209}
5210