1/* Tree inlining.
2   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3   Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to
19the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20Boston, MA 02110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "toplev.h"
27#include "tree.h"
28#include "tree-inline.h"
29#include "rtl.h"
30#include "expr.h"
31#include "flags.h"
32#include "params.h"
33#include "input.h"
34#include "insn-config.h"
35#include "varray.h"
36#include "hashtab.h"
37#include "splay-tree.h"
38#include "langhooks.h"
39#include "basic-block.h"
40#include "tree-iterator.h"
41#include "cgraph.h"
42#include "intl.h"
43#include "tree-mudflap.h"
44#include "tree-flow.h"
45#include "function.h"
46#include "ggc.h"
47#include "tree-flow.h"
48#include "diagnostic.h"
49#include "except.h"
50#include "debug.h"
51#include "pointer-set.h"
52#include "ipa-prop.h"
53
54/* I'm not real happy about this, but we need to handle gimple and
55   non-gimple trees.  */
56#include "tree-gimple.h"
57
58/* Inlining, Saving, Cloning
59
60   Inlining: a function body is duplicated, but the PARM_DECLs are
61   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
62   MODIFY_EXPRs that store to a dedicated returned-value variable.
63   The duplicated eh_region info of the copy will later be appended
64   to the info for the caller; the eh_region info in copied throwing
65   statements and RESX_EXPRs is adjusted accordingly.
66
67   Saving: make a semantically-identical copy of the function body.
68   Necessary when we want to generate code for the body (a destructive
69   operation), but we expect to need this body in the future (e.g. for
70   inlining into another function).
71
72   Cloning: (only in C++) We have one body for a con/de/structor, and
73   multiple function decls, each with a unique parameter list.
74   Duplicate the body, using the given splay tree; some parameters
75   will become constants (like 0 or 1).
76
77   All of these will simultaneously lookup any callgraph edges.  If
78   we're going to inline the duplicated function body, and the given
79   function has some cloned callgraph nodes (one for each place this
80   function will be inlined) those callgraph edges will be duplicated.
81   If we're saving or cloning the body, those callgraph edges will be
82   updated to point into the new body.  (Note that the original
83   callgraph node and edge list will not be altered.)
84
85   See the CALL_EXPR handling case in copy_body_r ().  */
86
87/* 0 if we should not perform inlining.
88   1 if we should expand functions calls inline at the tree level.
89   2 if we should consider *all* functions to be inline
90   candidates.  */
91
92int flag_inline_trees = 0;
93
94/* To Do:
95
96   o In order to make inlining-on-trees work, we pessimized
97     function-local static constants.  In particular, they are now
98     always output, even when not addressed.  Fix this by treating
99     function-local static constants just like global static
100     constants; the back-end already knows not to output them if they
101     are not needed.
102
103   o Provide heuristics to clamp inlining of recursive template
104     calls?  */
105
106/* Data required for function inlining.  */
107
108typedef struct inline_data
109{
110  /* FUNCTION_DECL for function being inlined.  */
111  tree callee;
112  /* FUNCTION_DECL for function being inlined into.  */
113  tree caller;
114  /* struct function for function being inlined.  Usually this is the same
115     as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
116     and saved_eh are in use.  */
117  struct function *callee_cfun;
118  /* The VAR_DECL for the return value.  */
119  tree retvar;
120  /* The map from local declarations in the inlined function to
121     equivalents in the function into which it is being inlined.  */
122  splay_tree decl_map;
123  /* We use the same mechanism to build clones that we do to perform
124     inlining.  However, there are a few places where we need to
125     distinguish between those two situations.  This flag is true if
126     we are cloning, rather than inlining.  */
127  bool cloning_p;
128  /* Similarly for saving function body.  */
129  bool saving_p;
130  /* Versioning function is slightly different from inlining. */
131  bool versioning_p;
132  /* Callgraph node of function we are inlining into.  */
133  struct cgraph_node *node;
134  /* Callgraph node of currently inlined function.  */
135  struct cgraph_node *current_node;
136  /* Current BLOCK.  */
137  tree block;
138  varray_type ipa_info;
139  /* Exception region the inlined call lie in.  */
140  int eh_region;
141  /* Take region number in the function being copied, add this value and
142     get eh region number of the duplicate in the function we inline into.  */
143  int eh_region_offset;
144} inline_data;
145
146/* Prototypes.  */
147
148static tree declare_return_variable (inline_data *, tree, tree, tree *);
149static tree copy_body_r (tree *, int *, void *);
150static tree copy_generic_body (inline_data *);
151static bool inlinable_function_p (tree);
152static tree remap_decl (tree, inline_data *);
153static tree remap_type (tree, inline_data *);
154static void remap_block (tree *, inline_data *);
155static tree remap_decl (tree, inline_data *);
156static tree remap_decls (tree, inline_data *);
157static void copy_bind_expr (tree *, int *, inline_data *);
158static tree mark_local_for_remap_r (tree *, int *, void *);
159static void unsave_expr_1 (tree);
160static tree unsave_r (tree *, int *, void *);
161static void declare_inline_vars (tree, tree);
162static void remap_save_expr (tree *, void *, int *);
163static bool replace_ref_tree (inline_data *, tree *);
164static inline bool inlining_p (inline_data *);
165static void add_lexical_block (tree current_block, tree new_block);
166
167/* Insert a tree->tree mapping for ID.  Despite the name suggests
168   that the trees should be variables, it is used for more than that.  */
169
170static void
171insert_decl_map (inline_data *id, tree key, tree value)
172{
173  splay_tree_insert (id->decl_map, (splay_tree_key) key,
174		     (splay_tree_value) value);
175
176  /* Always insert an identity map as well.  If we see this same new
177     node again, we won't want to duplicate it a second time.  */
178  if (key != value)
179    splay_tree_insert (id->decl_map, (splay_tree_key) value,
180		       (splay_tree_value) value);
181}
182
183/* Remap DECL during the copying of the BLOCK tree for the function.  */
184
185static tree
186remap_decl (tree decl, inline_data *id)
187{
188  splay_tree_node n;
189  tree fn;
190
191  /* We only remap local variables in the current function.  */
192  fn = id->callee;
193
194  /* See if we have remapped this declaration.  */
195
196  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
197
198  /* If we didn't already have an equivalent for this declaration,
199     create one now.  */
200  if (!n)
201    {
202      /* Make a copy of the variable or label.  */
203      tree t;
204      t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);
205
206      /* Remember it, so that if we encounter this local entity again
207	 we can reuse this copy.  Do this early because remap_type may
208	 need this decl for TYPE_STUB_DECL.  */
209      insert_decl_map (id, decl, t);
210
211      /* Remap types, if necessary.  */
212      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
213      if (TREE_CODE (t) == TYPE_DECL)
214        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
215
216      /* Remap sizes as necessary.  */
217      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
218      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
219
220      /* If fields, do likewise for offset and qualifier.  */
221      if (TREE_CODE (t) == FIELD_DECL)
222	{
223	  walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
224	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
225	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
226	}
227
228#if 0
229      /* FIXME handle anon aggrs.  */
230      if (! DECL_NAME (t) && TREE_TYPE (t)
231	  && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
232	{
233	  /* For a VAR_DECL of anonymous type, we must also copy the
234	     member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
235	  tree members = NULL;
236	  tree src;
237
238	  for (src = DECL_ANON_UNION_ELEMS (t); src;
239	       src = TREE_CHAIN (src))
240	    {
241	      tree member = remap_decl (TREE_VALUE (src), id);
242
243	      gcc_assert (!TREE_PURPOSE (src));
244	      members = tree_cons (NULL, member, members);
245	    }
246	  DECL_ANON_UNION_ELEMS (t) = nreverse (members);
247	}
248#endif
249
250      /* Remember it, so that if we encounter this local entity
251	 again we can reuse this copy.  */
252      insert_decl_map (id, decl, t);
253      return t;
254    }
255
256  return unshare_expr ((tree) n->value);
257}
258
259static tree
260remap_type_1 (tree type, inline_data *id)
261{
262  tree new, t;
263
264  /* We do need a copy.  build and register it now.  If this is a pointer or
265     reference type, remap the designated type and make a new pointer or
266     reference type.  */
267  if (TREE_CODE (type) == POINTER_TYPE)
268    {
269      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
270					 TYPE_MODE (type),
271					 TYPE_REF_CAN_ALIAS_ALL (type));
272      insert_decl_map (id, type, new);
273      return new;
274    }
275  else if (TREE_CODE (type) == REFERENCE_TYPE)
276    {
277      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
278					    TYPE_MODE (type),
279					    TYPE_REF_CAN_ALIAS_ALL (type));
280      insert_decl_map (id, type, new);
281      return new;
282    }
283  else
284    new = copy_node (type);
285
286  insert_decl_map (id, type, new);
287
288  /* This is a new type, not a copy of an old type.  Need to reassociate
289     variants.  We can handle everything except the main variant lazily.  */
290  t = TYPE_MAIN_VARIANT (type);
291  if (type != t)
292    {
293      t = remap_type (t, id);
294      TYPE_MAIN_VARIANT (new) = t;
295      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
296      TYPE_NEXT_VARIANT (t) = new;
297    }
298  else
299    {
300      TYPE_MAIN_VARIANT (new) = new;
301      TYPE_NEXT_VARIANT (new) = NULL;
302    }
303
304  if (TYPE_STUB_DECL (type))
305    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
306
307  /* Lazily create pointer and reference types.  */
308  TYPE_POINTER_TO (new) = NULL;
309  TYPE_REFERENCE_TO (new) = NULL;
310
311  switch (TREE_CODE (new))
312    {
313    case INTEGER_TYPE:
314    case REAL_TYPE:
315    case ENUMERAL_TYPE:
316    case BOOLEAN_TYPE:
317    case CHAR_TYPE:
318      t = TYPE_MIN_VALUE (new);
319      if (t && TREE_CODE (t) != INTEGER_CST)
320        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
321
322      t = TYPE_MAX_VALUE (new);
323      if (t && TREE_CODE (t) != INTEGER_CST)
324        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
325      return new;
326
327    case FUNCTION_TYPE:
328      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
329      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
330      return new;
331
332    case ARRAY_TYPE:
333      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
334      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
335      break;
336
337    case RECORD_TYPE:
338    case UNION_TYPE:
339    case QUAL_UNION_TYPE:
340      {
341	tree f, nf = NULL;
342
343	for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
344	  {
345	    t = remap_decl (f, id);
346	    DECL_CONTEXT (t) = new;
347	    TREE_CHAIN (t) = nf;
348	    nf = t;
349	  }
350	TYPE_FIELDS (new) = nreverse (nf);
351      }
352      break;
353
354    case OFFSET_TYPE:
355    default:
356      /* Shouldn't have been thought variable sized.  */
357      gcc_unreachable ();
358    }
359
360  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
361  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
362
363  return new;
364}
365
366static tree
367remap_type (tree type, inline_data *id)
368{
369  splay_tree_node node;
370
371  if (type == NULL)
372    return type;
373
374  /* See if we have remapped this type.  */
375  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
376  if (node)
377    return (tree) node->value;
378
379  /* The type only needs remapping if it's variably modified.  */
380  if (! variably_modified_type_p (type, id->callee))
381    {
382      insert_decl_map (id, type, type);
383      return type;
384    }
385
386  return remap_type_1 (type, id);
387}
388
389static tree
390remap_decls (tree decls, inline_data *id)
391{
392  tree old_var;
393  tree new_decls = NULL_TREE;
394
395  /* Remap its variables.  */
396  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
397    {
398      tree new_var;
399
400      /* We can not chain the local static declarations into the unexpanded_var_list
401         as we can't duplicate them or break one decl rule.  Go ahead and link
402         them into unexpanded_var_list.  */
403      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
404	  && !DECL_EXTERNAL (old_var))
405	{
406	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
407						 cfun->unexpanded_var_list);
408	  continue;
409	}
410
411      /* Remap the variable.  */
412      new_var = remap_decl (old_var, id);
413
414      /* If we didn't remap this variable, so we can't mess with its
415	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
416	 already declared somewhere else, so don't declare it here.  */
417      if (!new_var || new_var == id->retvar)
418	;
419      else
420	{
421	  gcc_assert (DECL_P (new_var));
422	  TREE_CHAIN (new_var) = new_decls;
423	  new_decls = new_var;
424	}
425    }
426
427  return nreverse (new_decls);
428}
429
430/* Copy the BLOCK to contain remapped versions of the variables
431   therein.  And hook the new block into the block-tree.  */
432
433static void
434remap_block (tree *block, inline_data *id)
435{
436  tree old_block;
437  tree new_block;
438  tree fn;
439
440  /* Make the new block.  */
441  old_block = *block;
442  new_block = make_node (BLOCK);
443  TREE_USED (new_block) = TREE_USED (old_block);
444  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
445  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
446  *block = new_block;
447
448  /* Remap its variables.  */
449  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
450
451  fn = id->caller;
452  if (id->cloning_p)
453    /* We're building a clone; DECL_INITIAL is still
454       error_mark_node, and current_binding_level is the parm
455       binding level.  */
456    lang_hooks.decls.insert_block (new_block);
457  /* Remember the remapped block.  */
458  insert_decl_map (id, old_block, new_block);
459}
460
461/* Copy the whole block tree and root it in id->block.  */
462static tree
463remap_blocks (tree block, inline_data *id)
464{
465  tree t;
466  tree new = block;
467
468  if (!block)
469    return NULL;
470
471  remap_block (&new, id);
472  gcc_assert (new != block);
473  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
474    add_lexical_block (new, remap_blocks (t, id));
475  return new;
476}
477
478static void
479copy_statement_list (tree *tp)
480{
481  tree_stmt_iterator oi, ni;
482  tree new;
483
484  new = alloc_stmt_list ();
485  ni = tsi_start (new);
486  oi = tsi_start (*tp);
487  *tp = new;
488
489  for (; !tsi_end_p (oi); tsi_next (&oi))
490    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
491}
492
493static void
494copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
495{
496  tree block = BIND_EXPR_BLOCK (*tp);
497  /* Copy (and replace) the statement.  */
498  copy_tree_r (tp, walk_subtrees, NULL);
499  if (block)
500    {
501      remap_block (&block, id);
502      BIND_EXPR_BLOCK (*tp) = block;
503    }
504
505  if (BIND_EXPR_VARS (*tp))
506    /* This will remap a lot of the same decls again, but this should be
507       harmless.  */
508    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
509}
510
511/* Called from copy_body_id via walk_tree.  DATA is really an
512   `inline_data *'.  */
513
514static tree
515copy_body_r (tree *tp, int *walk_subtrees, void *data)
516{
517  inline_data *id = (inline_data *) data;
518  tree fn = id->callee;
519  tree new_block;
520
521  /* Begin by recognizing trees that we'll completely rewrite for the
522     inlining context.  Our output for these trees is completely
523     different from out input (e.g. RETURN_EXPR is deleted, and morphs
524     into an edge).  Further down, we'll handle trees that get
525     duplicated and/or tweaked.  */
526
527  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
528     GOTO_STMT with the RET_LABEL as its target.  */
529  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
530    {
531      tree assignment = TREE_OPERAND (*tp, 0);
532
533      /* If we're returning something, just turn that into an
534	 assignment into the equivalent of the original RESULT_DECL.
535	 If the "assignment" is just the result decl, the result
536	 decl has already been set (e.g. a recent "foo (&result_decl,
537	 ...)"); just toss the entire RETURN_EXPR.  */
538      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
539	{
540	  /* Replace the RETURN_EXPR with (a copy of) the
541	     MODIFY_EXPR hanging underneath.  */
542	  *tp = copy_node (assignment);
543	}
544      else /* Else the RETURN_EXPR returns no value.  */
545	{
546	  *tp = NULL;
547	  return (void *)1;
548	}
549    }
550
551  /* Local variables and labels need to be replaced by equivalent
552     variables.  We don't want to copy static variables; there's only
553     one of those, no matter how many times we inline the containing
554     function.  Similarly for globals from an outer function.  */
555  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
556    {
557      tree new_decl;
558
559      /* Remap the declaration.  */
560      new_decl = remap_decl (*tp, id);
561      gcc_assert (new_decl);
562      /* Replace this variable with the copy.  */
563      STRIP_TYPE_NOPS (new_decl);
564      *tp = new_decl;
565      *walk_subtrees = 0;
566    }
567  else if (TREE_CODE (*tp) == STATEMENT_LIST)
568    copy_statement_list (tp);
569  else if (TREE_CODE (*tp) == SAVE_EXPR)
570    remap_save_expr (tp, id->decl_map, walk_subtrees);
571  else if (TREE_CODE (*tp) == LABEL_DECL
572	   && (! DECL_CONTEXT (*tp)
573	       || decl_function_context (*tp) == id->callee))
574    /* These may need to be remapped for EH handling.  */
575    *tp = remap_decl (*tp, id);
576  else if (TREE_CODE (*tp) == BIND_EXPR)
577    copy_bind_expr (tp, walk_subtrees, id);
578  /* Types may need remapping as well.  */
579  else if (TYPE_P (*tp))
580    *tp = remap_type (*tp, id);
581
582  /* If this is a constant, we have to copy the node iff the type will be
583     remapped.  copy_tree_r will not copy a constant.  */
584  else if (CONSTANT_CLASS_P (*tp))
585    {
586      tree new_type = remap_type (TREE_TYPE (*tp), id);
587
588      if (new_type == TREE_TYPE (*tp))
589	*walk_subtrees = 0;
590
591      else if (TREE_CODE (*tp) == INTEGER_CST)
592	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
593				  TREE_INT_CST_HIGH (*tp));
594      else
595	{
596	  *tp = copy_node (*tp);
597	  TREE_TYPE (*tp) = new_type;
598	}
599    }
600
601  /* Otherwise, just copy the node.  Note that copy_tree_r already
602     knows not to copy VAR_DECLs, etc., so this is safe.  */
603  else
604    {
605      /* Here we handle trees that are not completely rewritten.
606	 First we detect some inlining-induced bogosities for
607	 discarding.  */
608      if (TREE_CODE (*tp) == MODIFY_EXPR
609	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
610	  && (lang_hooks.tree_inlining.auto_var_in_fn_p
611	      (TREE_OPERAND (*tp, 0), fn)))
612	{
613	  /* Some assignments VAR = VAR; don't generate any rtl code
614	     and thus don't count as variable modification.  Avoid
615	     keeping bogosities like 0 = 0.  */
616	  tree decl = TREE_OPERAND (*tp, 0), value;
617	  splay_tree_node n;
618
619	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
620	  if (n)
621	    {
622	      value = (tree) n->value;
623	      STRIP_TYPE_NOPS (value);
624	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
625		{
626		  *tp = build_empty_stmt ();
627		  return copy_body_r (tp, walk_subtrees, data);
628		}
629	    }
630	}
631      else if (TREE_CODE (*tp) == INDIRECT_REF
632	       && !id->versioning_p)
633	{
634	  /* Get rid of *& from inline substitutions that can happen when a
635	     pointer argument is an ADDR_EXPR.  */
636	  tree decl = TREE_OPERAND (*tp, 0);
637	  splay_tree_node n;
638
639	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
640	  if (n)
641	    {
642	      tree new;
643	      tree old;
644	      /* If we happen to get an ADDR_EXPR in n->value, strip
645	         it manually here as we'll eventually get ADDR_EXPRs
646		 which lie about their types pointed to.  In this case
647		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
648		 but we absolutely rely on that.  As fold_indirect_ref
649	         does other useful transformations, try that first, though.  */
650	      tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
651	      new = unshare_expr ((tree)n->value);
652	      old = *tp;
653	      *tp = fold_indirect_ref_1 (type, new);
654	      if (! *tp)
655	        {
656		  if (TREE_CODE (new) == ADDR_EXPR)
657		    *tp = TREE_OPERAND (new, 0);
658	          else
659		    {
660	              *tp = build1 (INDIRECT_REF, type, new);
661		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
662		    }
663		}
664	      *walk_subtrees = 0;
665	      return NULL;
666	    }
667	}
668
669      /* Here is the "usual case".  Copy this tree node, and then
670	 tweak some special cases.  */
671      copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);
672
673      /* If EXPR has block defined, map it to newly constructed block.
674         When inlining we want EXPRs without block appear in the block
675	 of function call.  */
676      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
677	{
678	  new_block = id->block;
679	  if (TREE_BLOCK (*tp))
680	    {
681	      splay_tree_node n;
682	      n = splay_tree_lookup (id->decl_map,
683				     (splay_tree_key) TREE_BLOCK (*tp));
684	      gcc_assert (n);
685	      new_block = (tree) n->value;
686	    }
687	  TREE_BLOCK (*tp) = new_block;
688	}
689
690      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
691	TREE_OPERAND (*tp, 0) =
692	  build_int_cst
693	    (NULL_TREE,
694	     id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
695
696      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
697
698      /* The copied TARGET_EXPR has never been expanded, even if the
699	 original node was expanded already.  */
700      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
701	{
702	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
703	  TREE_OPERAND (*tp, 3) = NULL_TREE;
704	}
705
706      /* Variable substitution need not be simple.  In particular, the
707	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
708	 and friends are up-to-date.  */
709      else if (TREE_CODE (*tp) == ADDR_EXPR)
710	{
711	  walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
712	  /* Handle the case where we substituted an INDIRECT_REF
713	     into the operand of the ADDR_EXPR.  */
714	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
715	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
716	  else
717	    {
718	      recompute_tree_invarant_for_addr_expr (*tp);
719	      if (DECL_P (TREE_OPERAND (*tp, 0)))
720	        TREE_ADDRESSABLE (TREE_OPERAND (*tp, 0)) = 1;
721	    }
722	  *walk_subtrees = 0;
723	}
724    }
725
726  /* Keep iterating.  */
727  return NULL_TREE;
728}
729
730/* Copy basic block, scale profile accordingly.  Edges will be taken care of
731   later  */
732
733static basic_block
734copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
735{
736  block_stmt_iterator bsi, copy_bsi;
737  basic_block copy_basic_block;
738
739  /* create_basic_block() will append every new block to
740     basic_block_info automatically.  */
741  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
742  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
743  copy_basic_block->frequency = (bb->frequency
744				     * frequency_scale / REG_BR_PROB_BASE);
745  copy_bsi = bsi_start (copy_basic_block);
746
747  for (bsi = bsi_start (bb);
748       !bsi_end_p (bsi); bsi_next (&bsi))
749    {
750      tree stmt = bsi_stmt (bsi);
751      tree orig_stmt = stmt;
752
753      walk_tree (&stmt, copy_body_r, id, NULL);
754
755      /* RETURN_EXPR might be removed,
756         this is signalled by making stmt pointer NULL.  */
757      if (stmt)
758	{
759	  tree call, decl;
760
761	  /* With return slot optimization we can end up with
762	     non-gimple (foo *)&this->m, fix that here.  */
763	  if (TREE_CODE (stmt) == MODIFY_EXPR
764	      && TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
765	      && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0)))
766	    gimplify_stmt (&stmt);
767
768          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
769	  call = get_call_expr_in (stmt);
770	  /* We're duplicating a CALL_EXPR.  Find any corresponding
771	     callgraph edges and update or duplicate them.  */
772	  if (call && (decl = get_callee_fndecl (call)))
773	    {
774	      if (id->saving_p)
775		{
776		  struct cgraph_node *node;
777		  struct cgraph_edge *edge;
778
779		  /* We're saving a copy of the body, so we'll update the
780		     callgraph nodes in place.  Note that we avoid
781		     altering the original callgraph node; we begin with
782		     the first clone.  */
783		  for (node = id->node->next_clone;
784		       node;
785		       node = node->next_clone)
786		    {
787		      edge = cgraph_edge (node, orig_stmt);
788		      gcc_assert (edge);
789		      edge->call_stmt = stmt;
790		    }
791		}
792	      else
793		{
794		  struct cgraph_edge *edge;
795
796		  /* We're cloning or inlining this body; duplicate the
797		     associate callgraph nodes.  */
798		  if (!id->versioning_p)
799		    {
800		      edge = cgraph_edge (id->current_node, orig_stmt);
801		      if (edge)
802			cgraph_clone_edge (edge, id->node, stmt,
803					   REG_BR_PROB_BASE, 1, true);
804		    }
805		}
806	      if (id->versioning_p)
807		{
808		  /* Update the call_expr on the edges from the new version
809		     to its callees. */
810		  struct cgraph_edge *edge;
811		  edge = cgraph_edge (id->node, orig_stmt);
812		  if (edge)
813		    edge->call_stmt = stmt;
814		}
815	    }
816	  /* If you think we can abort here, you are wrong.
817	     There is no region 0 in tree land.  */
818	  gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
819		      != 0);
820
821	  if (tree_could_throw_p (stmt))
822	    {
823	      int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
824	      /* Add an entry for the copied tree in the EH hashtable.
825		 When saving or cloning or versioning, use the hashtable in
826		 cfun, and just copy the EH number.  When inlining, use the
827		 hashtable in the caller, and adjust the region number.  */
828	      if (region > 0)
829		add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
830
831	      /* If this tree doesn't have a region associated with it,
832		 and there is a "current region,"
833		 then associate this tree with the current region
834		 and add edges associated with this region.  */
835	      if ((lookup_stmt_eh_region_fn (id->callee_cfun,
836					     orig_stmt) <= 0
837		   && id->eh_region > 0)
838		  && tree_could_throw_p (stmt))
839		add_stmt_to_eh_region (stmt, id->eh_region);
840	    }
841	}
842    }
843  return copy_basic_block;
844}
845
846/* Copy edges from BB into its copy constructed earlier, scale profile
847   accordingly.  Edges will be taken care of later.  Assume aux
848   pointers to point to the copies of each BB.  */
849static void
850copy_edges_for_bb (basic_block bb, int count_scale)
851{
852  basic_block new_bb = bb->aux;
853  edge_iterator ei;
854  edge old_edge;
855  block_stmt_iterator bsi;
856  int flags;
857
858  /* Use the indices from the original blocks to create edges for the
859     new ones.  */
860  FOR_EACH_EDGE (old_edge, ei, bb->succs)
861    if (!(old_edge->flags & EDGE_EH))
862      {
863	edge new;
864
865	flags = old_edge->flags;
866
867	/* Return edges do get a FALLTHRU flag when the get inlined.  */
868	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
869	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
870	  flags |= EDGE_FALLTHRU;
871	new = make_edge (new_bb, old_edge->dest->aux, flags);
872	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
873	new->probability = old_edge->probability;
874      }
875
876  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
877    return;
878
879  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
880    {
881      tree copy_stmt;
882
883      copy_stmt = bsi_stmt (bsi);
884      update_stmt (copy_stmt);
885      /* Do this before the possible split_block.  */
886      bsi_next (&bsi);
887
888      /* If this tree could throw an exception, there are two
889         cases where we need to add abnormal edge(s): the
890         tree wasn't in a region and there is a "current
891         region" in the caller; or the original tree had
892         EH edges.  In both cases split the block after the tree,
893         and add abnormal edge(s) as needed; we need both
894         those from the callee and the caller.
895         We check whether the copy can throw, because the const
896         propagation can change an INDIRECT_REF which throws
897         into a COMPONENT_REF which doesn't.  If the copy
898         can throw, the original could also throw.  */
899
900      if (tree_can_throw_internal (copy_stmt))
901	{
902	  if (!bsi_end_p (bsi))
903	    /* Note that bb's predecessor edges aren't necessarily
904	       right at this point; split_block doesn't care.  */
905	    {
906	      edge e = split_block (new_bb, copy_stmt);
907	      new_bb = e->dest;
908	      bsi = bsi_start (new_bb);
909	    }
910
911           make_eh_edges (copy_stmt);
912	}
913    }
914}
915
916/* Wrapper for remap_decl so it can be used as a callback.  */
917static tree
918remap_decl_1 (tree decl, void *data)
919{
920  return remap_decl (decl, data);
921}
922
923/* Make a copy of the body of FN so that it can be inserted inline in
924   another function.  Walks FN via CFG, returns new fndecl.  */
925
926static tree
927copy_cfg_body (inline_data * id, gcov_type count, int frequency,
928	       basic_block entry_block_map, basic_block exit_block_map)
929{
930  tree callee_fndecl = id->callee;
931  /* Original cfun for the callee, doesn't change.  */
932  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
933  /* Copy, built by this function.  */
934  struct function *new_cfun;
935  /* Place to copy from; when a copy of the function was saved off earlier,
936     use that instead of the main copy.  */
937  struct function *cfun_to_copy =
938    (struct function *) ggc_alloc_cleared (sizeof (struct function));
939  basic_block bb;
940  tree new_fndecl = NULL;
941  bool saving_or_cloning;
942  int count_scale, frequency_scale;
943
944  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
945    count_scale = (REG_BR_PROB_BASE * count
946		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
947  else
948    count_scale = 1;
949
950  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
951    frequency_scale = (REG_BR_PROB_BASE * frequency
952		       /
953		       ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
954  else
955    frequency_scale = count_scale;
956
957  /* Register specific tree functions.  */
958  tree_register_cfg_hooks ();
959
960  /* Must have a CFG here at this point.  */
961  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
962	      (DECL_STRUCT_FUNCTION (callee_fndecl)));
963
964  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
965
966  /* If there is a saved_cfg+saved_args lurking in the
967     struct function, a copy of the callee body was saved there, and
968     the 'struct cgraph edge' nodes have been fudged to point into the
969     saved body.  Accordingly, we want to copy that saved body so the
970     callgraph edges will be recognized and cloned properly.  */
971  if (cfun_to_copy->saved_cfg)
972    {
973      cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
974      cfun_to_copy->eh = cfun_to_copy->saved_eh;
975    }
976  id->callee_cfun = cfun_to_copy;
977
978  /* If saving or cloning a function body, create new basic_block_info
979     and label_to_block_maps.  Otherwise, we're duplicating a function
980     body for inlining; insert our new blocks and labels into the
981     existing varrays.  */
982  saving_or_cloning = (id->saving_p || id->cloning_p || id->versioning_p);
983  if (saving_or_cloning)
984    {
985      new_cfun =
986	(struct function *) ggc_alloc_cleared (sizeof (struct function));
987      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
988      new_cfun->cfg = NULL;
989      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
990      new_cfun->ib_boundaries_block = (varray_type) 0;
991      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
992      push_cfun (new_cfun);
993      init_empty_tree_cfg ();
994
995      ENTRY_BLOCK_PTR->count =
996	(ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
997	 REG_BR_PROB_BASE);
998      ENTRY_BLOCK_PTR->frequency =
999	(ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
1000	 frequency_scale / REG_BR_PROB_BASE);
1001      EXIT_BLOCK_PTR->count =
1002	(EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
1003	 REG_BR_PROB_BASE);
1004      EXIT_BLOCK_PTR->frequency =
1005	(EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
1006	 frequency_scale / REG_BR_PROB_BASE);
1007
1008      entry_block_map = ENTRY_BLOCK_PTR;
1009      exit_block_map = EXIT_BLOCK_PTR;
1010    }
1011
1012  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1013  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1014
1015
1016  /* Duplicate any exception-handling regions.  */
1017  if (cfun->eh)
1018    {
1019      if (saving_or_cloning)
1020        init_eh_for_function ();
1021      id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
1022		     				   remap_decl_1,
1023						   id, id->eh_region);
1024      gcc_assert (inlining_p (id) || !id->eh_region_offset);
1025    }
1026  /* Use aux pointers to map the original blocks to copy.  */
1027  FOR_EACH_BB_FN (bb, cfun_to_copy)
1028    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
1029  /* Now that we've duplicated the blocks, duplicate their edges.  */
1030  FOR_ALL_BB_FN (bb, cfun_to_copy)
1031    copy_edges_for_bb (bb, count_scale);
1032  FOR_ALL_BB_FN (bb, cfun_to_copy)
1033    bb->aux = NULL;
1034
1035  if (saving_or_cloning)
1036    pop_cfun ();
1037
1038  return new_fndecl;
1039}
1040
1041/* Make a copy of the body of FN so that it can be inserted inline in
1042   another function.  */
1043
1044static tree
1045copy_generic_body (inline_data *id)
1046{
1047  tree body;
1048  tree fndecl = id->callee;
1049
1050  body = DECL_SAVED_TREE (fndecl);
1051  walk_tree (&body, copy_body_r, id, NULL);
1052
1053  return body;
1054}
1055
1056static tree
1057copy_body (inline_data *id, gcov_type count, int frequency,
1058	   basic_block entry_block_map, basic_block exit_block_map)
1059{
1060  tree fndecl = id->callee;
1061  tree body;
1062
1063  /* If this body has a CFG, walk CFG and copy.  */
1064  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1065  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1066
1067  return body;
1068}
1069
1070/* Return true if VALUE is an ADDR_EXPR of an automatic variable
1071   defined in function FN, or of a data member thereof.  */
1072
1073static bool
1074self_inlining_addr_expr (tree value, tree fn)
1075{
1076  tree var;
1077
1078  if (TREE_CODE (value) != ADDR_EXPR)
1079    return false;
1080
1081  var = get_base_address (TREE_OPERAND (value, 0));
1082
1083  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
1084}
1085
1086static void
1087setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
1088		     basic_block bb, tree *vars)
1089{
1090  tree init_stmt;
1091  tree var;
1092  tree var_sub;
1093
1094  /* If the parameter is never assigned to, we may not need to
1095     create a new variable here at all.  Instead, we may be able
1096     to just use the argument value.  */
1097  if (TREE_READONLY (p)
1098      && !TREE_ADDRESSABLE (p)
1099      && value && !TREE_SIDE_EFFECTS (value))
1100    {
1101      /* We may produce non-gimple trees by adding NOPs or introduce
1102	 invalid sharing when operand is not really constant.
1103	 It is not big deal to prohibit constant propagation here as
1104	 we will constant propagate in DOM1 pass anyway.  */
1105      if (is_gimple_min_invariant (value)
1106	  && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
1107	  /* We have to be very careful about ADDR_EXPR.  Make sure
1108	     the base variable isn't a local variable of the inlined
1109	     function, e.g., when doing recursive inlining, direct or
1110	     mutually-recursive or whatever, which is why we don't
1111	     just test whether fn == current_function_decl.  */
1112	  && ! self_inlining_addr_expr (value, fn))
1113	{
1114	  insert_decl_map (id, p, value);
1115	  return;
1116	}
1117    }
1118
1119  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
1120     here since the type of this decl must be visible to the calling
1121     function.  */
1122  var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);
1123
1124  /* See if the frontend wants to pass this by invisible reference.  If
1125     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
1126     replace uses of the PARM_DECL with dereferences.  */
1127  if (TREE_TYPE (var) != TREE_TYPE (p)
1128      && POINTER_TYPE_P (TREE_TYPE (var))
1129      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
1130    {
1131      insert_decl_map (id, var, var);
1132      var_sub = build_fold_indirect_ref (var);
1133    }
1134  else
1135    var_sub = var;
1136
1137  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1138     that way, when the PARM_DECL is encountered, it will be
1139     automatically replaced by the VAR_DECL.  */
1140  insert_decl_map (id, p, var_sub);
1141
1142  /* Declare this new variable.  */
1143  TREE_CHAIN (var) = *vars;
1144  *vars = var;
1145
1146  /* Make gimplifier happy about this variable.  */
1147  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1148
1149  /* Even if P was TREE_READONLY, the new VAR should not be.
1150     In the original code, we would have constructed a
1151     temporary, and then the function body would have never
1152     changed the value of P.  However, now, we will be
1153     constructing VAR directly.  The constructor body may
1154     change its value multiple times as it is being
1155     constructed.  Therefore, it must not be TREE_READONLY;
1156     the back-end assumes that TREE_READONLY variable is
1157     assigned to only once.  */
1158  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1159    TREE_READONLY (var) = 0;
1160
1161  /* Initialize this VAR_DECL from the equivalent argument.  Convert
1162     the argument to the proper type in case it was promoted.  */
1163  if (value)
1164    {
1165      tree rhs = fold_convert (TREE_TYPE (var), value);
1166      block_stmt_iterator bsi = bsi_last (bb);
1167
1168      if (rhs == error_mark_node)
1169	return;
1170
1171      STRIP_USELESS_TYPE_CONVERSION (rhs);
1172
1173      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
1174	 keep our trees in gimple form.  */
1175      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
1176
1177      /* If we did not create a gimple value and we did not create a gimple
1178	 cast of a gimple value, then we will need to gimplify INIT_STMTS
1179	 at the end.  Note that is_gimple_cast only checks the outer
1180	 tree code, not its operand.  Thus the explicit check that its
1181	 operand is a gimple value.  */
1182      if (!is_gimple_val (rhs)
1183	  && (!is_gimple_cast (rhs)
1184	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
1185	gimplify_stmt (&init_stmt);
1186
1187      /* If VAR represents a zero-sized variable, it's possible that the
1188	 assignment statment may result in no gimple statements.  */
1189      if (init_stmt)
1190        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
1191    }
1192}
1193
1194/* Generate code to initialize the parameters of the function at the
1195   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */
1196
1197static void
1198initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
1199			       tree fn, basic_block bb)
1200{
1201  tree parms;
1202  tree a;
1203  tree p;
1204  tree vars = NULL_TREE;
1205  int argnum = 0;
1206
1207  /* Figure out what the parameters are.  */
1208  parms = DECL_ARGUMENTS (fn);
1209  if (fn == current_function_decl)
1210    parms = cfun->saved_args;
1211
1212  /* Loop through the parameter declarations, replacing each with an
1213     equivalent VAR_DECL, appropriately initialized.  */
1214  for (p = parms, a = args; p;
1215       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
1216    {
1217      tree value;
1218
1219      ++argnum;
1220
1221      /* Find the initializer.  */
1222      value = lang_hooks.tree_inlining.convert_parm_for_inlining
1223	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
1224
1225      setup_one_parameter (id, p, value, fn, bb, &vars);
1226    }
1227
1228  /* Initialize the static chain.  */
1229  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1230  if (fn == current_function_decl)
1231    p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
1232  if (p)
1233    {
1234      /* No static chain?  Seems like a bug in tree-nested.c.  */
1235      gcc_assert (static_chain);
1236
1237      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
1238    }
1239
1240  declare_inline_vars (id->block, vars);
1241}
1242
1243/* Declare a return variable to replace the RESULT_DECL for the
1244   function we are calling.  An appropriate DECL_STMT is returned.
1245   The USE_STMT is filled to contain a use of the declaration to
1246   indicate the return value of the function.
1247
1248   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
1249   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
1250   the MODIFY_EXPR to which this call is the RHS.
1251
1252   The return value is a (possibly null) value that is the result of the
1253   function as seen by the callee.  *USE_P is a (possibly null) value that
1254   holds the result as seen by the caller.  */
1255
1256static tree
1257declare_return_variable (inline_data *id, tree return_slot_addr,
1258			 tree modify_dest, tree *use_p)
1259{
1260  tree callee = id->callee;
1261  tree caller = id->caller;
1262  tree result = DECL_RESULT (callee);
1263  tree callee_type = TREE_TYPE (result);
1264  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1265  tree var, use;
1266
1267  /* We don't need to do anything for functions that don't return
1268     anything.  */
1269  if (!result || VOID_TYPE_P (callee_type))
1270    {
1271      *use_p = NULL_TREE;
1272      return NULL_TREE;
1273    }
1274
1275  /* If there was a return slot, then the return value is the
1276     dereferenced address of that object.  */
1277  if (return_slot_addr)
1278    {
1279      /* The front end shouldn't have used both return_slot_addr and
1280	 a modify expression.  */
1281      gcc_assert (!modify_dest);
1282      if (DECL_BY_REFERENCE (result))
1283	var = return_slot_addr;
1284      else
1285	var = build_fold_indirect_ref (return_slot_addr);
1286      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1287	  && !DECL_COMPLEX_GIMPLE_REG_P (result)
1288	  && DECL_P (var))
1289	DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
1290      use = NULL;
1291      goto done;
1292    }
1293
1294  /* All types requiring non-trivial constructors should have been handled.  */
1295  gcc_assert (!TREE_ADDRESSABLE (callee_type));
1296
1297  /* Attempt to avoid creating a new temporary variable.  */
1298  if (modify_dest)
1299    {
1300      bool use_it = false;
1301
1302      /* We can't use MODIFY_DEST if there's type promotion involved.  */
1303      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
1304	use_it = false;
1305
1306      /* ??? If we're assigning to a variable sized type, then we must
1307	 reuse the destination variable, because we've no good way to
1308	 create variable sized temporaries at this point.  */
1309      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1310	use_it = true;
1311
1312      /* If the callee cannot possibly modify MODIFY_DEST, then we can
1313	 reuse it as the result of the call directly.  Don't do this if
1314	 it would promote MODIFY_DEST to addressable.  */
1315      else if (TREE_ADDRESSABLE (result))
1316	use_it = false;
1317      else
1318	{
1319	  tree base_m = get_base_address (modify_dest);
1320
1321	  /* If the base isn't a decl, then it's a pointer, and we don't
1322	     know where that's going to go.  */
1323	  if (!DECL_P (base_m))
1324	    use_it = false;
1325	  else if (is_global_var (base_m))
1326	    use_it = false;
1327	  else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1328		   && !DECL_COMPLEX_GIMPLE_REG_P (result)
1329		   && DECL_COMPLEX_GIMPLE_REG_P (base_m))
1330	    use_it = false;
1331	  else if (!TREE_ADDRESSABLE (base_m))
1332	    use_it = true;
1333	}
1334
1335      if (use_it)
1336	{
1337	  var = modify_dest;
1338	  use = NULL;
1339	  goto done;
1340	}
1341    }
1342
1343  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1344
1345  var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);
1346
1347  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1348  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1349    = tree_cons (NULL_TREE, var,
1350		 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1351
1352  /* Do not have the rest of GCC warn about this variable as it should
1353     not be visible to the user.  */
1354  TREE_NO_WARNING (var) = 1;
1355
1356  /* Build the use expr.  If the return type of the function was
1357     promoted, convert it back to the expected type.  */
1358  use = var;
1359  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
1360    use = fold_convert (caller_type, var);
1361
1362  STRIP_USELESS_TYPE_CONVERSION (use);
1363
1364 done:
1365  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1366     way, when the RESULT_DECL is encountered, it will be
1367     automatically replaced by the VAR_DECL.  */
1368  insert_decl_map (id, result, var);
1369
1370  /* Remember this so we can ignore it in remap_decls.  */
1371  id->retvar = var;
1372
1373  *use_p = use;
1374  return var;
1375}
1376
1377/* Returns nonzero if a function can be inlined as a tree.  */
1378
1379bool
1380tree_inlinable_function_p (tree fn)
1381{
1382  return inlinable_function_p (fn);
1383}
1384
1385static const char *inline_forbidden_reason;
1386
1387static tree
1388inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1389		      void *fnp)
1390{
1391  tree node = *nodep;
1392  tree fn = (tree) fnp;
1393  tree t;
1394
1395  switch (TREE_CODE (node))
1396    {
1397    case CALL_EXPR:
1398      /* Refuse to inline alloca call unless user explicitly forced so as
1399	 this may change program's memory overhead drastically when the
1400	 function using alloca is called in loop.  In GCC present in
1401	 SPEC2000 inlining into schedule_block cause it to require 2GB of
1402	 RAM instead of 256MB.  */
1403      if (alloca_call_p (node)
1404	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1405	{
1406	  inline_forbidden_reason
1407	    = G_("function %q+F can never be inlined because it uses "
1408		 "alloca (override using the always_inline attribute)");
1409	  return node;
1410	}
1411      t = get_callee_fndecl (node);
1412      if (! t)
1413	break;
1414
1415      /* We cannot inline functions that call setjmp.  */
1416      if (setjmp_call_p (t))
1417	{
1418	  inline_forbidden_reason
1419	    = G_("function %q+F can never be inlined because it uses setjmp");
1420	  return node;
1421	}
1422
1423      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1424	switch (DECL_FUNCTION_CODE (t))
1425	  {
1426	    /* We cannot inline functions that take a variable number of
1427	       arguments.  */
1428	  case BUILT_IN_VA_START:
1429	  case BUILT_IN_STDARG_START:
1430	  case BUILT_IN_NEXT_ARG:
1431	  case BUILT_IN_VA_END:
1432	    inline_forbidden_reason
1433	      = G_("function %q+F can never be inlined because it "
1434		   "uses variable argument lists");
1435	    return node;
1436
1437	  case BUILT_IN_LONGJMP:
1438	    /* We can't inline functions that call __builtin_longjmp at
1439	       all.  The non-local goto machinery really requires the
1440	       destination be in a different function.  If we allow the
1441	       function calling __builtin_longjmp to be inlined into the
1442	       function calling __builtin_setjmp, Things will Go Awry.  */
1443	    inline_forbidden_reason
1444	      = G_("function %q+F can never be inlined because "
1445		   "it uses setjmp-longjmp exception handling");
1446	    return node;
1447
1448	  case BUILT_IN_NONLOCAL_GOTO:
1449	    /* Similarly.  */
1450	    inline_forbidden_reason
1451	      = G_("function %q+F can never be inlined because "
1452		   "it uses non-local goto");
1453	    return node;
1454
1455	  case BUILT_IN_RETURN:
1456	  case BUILT_IN_APPLY_ARGS:
1457	    /* If a __builtin_apply_args caller would be inlined,
1458	       it would be saving arguments of the function it has
1459	       been inlined into.  Similarly __builtin_return would
1460	       return from the function the inline has been inlined into.  */
1461	    inline_forbidden_reason
1462	      = G_("function %q+F can never be inlined because "
1463		   "it uses __builtin_return or __builtin_apply_args");
1464	    return node;
1465
1466	  default:
1467	    break;
1468	  }
1469      break;
1470
1471    case GOTO_EXPR:
1472      t = TREE_OPERAND (node, 0);
1473
1474      /* We will not inline a function which uses computed goto.  The
1475	 addresses of its local labels, which may be tucked into
1476	 global storage, are of course not constant across
1477	 instantiations, which causes unexpected behavior.  */
1478      if (TREE_CODE (t) != LABEL_DECL)
1479	{
1480	  inline_forbidden_reason
1481	    = G_("function %q+F can never be inlined "
1482		 "because it contains a computed goto");
1483	  return node;
1484	}
1485      break;
1486
1487    case LABEL_EXPR:
1488      t = TREE_OPERAND (node, 0);
1489      if (DECL_NONLOCAL (t))
1490	{
1491	  /* We cannot inline a function that receives a non-local goto
1492	     because we cannot remap the destination label used in the
1493	     function that is performing the non-local goto.  */
1494	  inline_forbidden_reason
1495	    = G_("function %q+F can never be inlined "
1496		 "because it receives a non-local goto");
1497	  return node;
1498	}
1499      break;
1500
1501    case RECORD_TYPE:
1502    case UNION_TYPE:
1503      /* We cannot inline a function of the form
1504
1505	   void F (int i) { struct S { int ar[i]; } s; }
1506
1507	 Attempting to do so produces a catch-22.
1508	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1509	 UNION_TYPE nodes, then it goes into infinite recursion on a
1510	 structure containing a pointer to its own type.  If it doesn't,
1511	 then the type node for S doesn't get adjusted properly when
1512	 F is inlined.
1513
1514	 ??? This is likely no longer true, but it's too late in the 4.0
1515	 cycle to try to find out.  This should be checked for 4.1.  */
1516      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1517	if (variably_modified_type_p (TREE_TYPE (t), NULL))
1518	  {
1519	    inline_forbidden_reason
1520	      = G_("function %q+F can never be inlined "
1521		   "because it uses variable sized variables");
1522	    return node;
1523	  }
1524
1525    default:
1526      break;
1527    }
1528
1529  return NULL_TREE;
1530}
1531
1532/* Return subexpression representing possible alloca call, if any.  */
1533static tree
1534inline_forbidden_p (tree fndecl)
1535{
1536  location_t saved_loc = input_location;
1537  block_stmt_iterator bsi;
1538  basic_block bb;
1539  tree ret = NULL_TREE;
1540
1541  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1542    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1543      {
1544	ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1545				    inline_forbidden_p_1, fndecl);
1546	if (ret)
1547	  goto egress;
1548      }
1549
1550egress:
1551  input_location = saved_loc;
1552  return ret;
1553}
1554
1555/* Returns nonzero if FN is a function that does not have any
1556   fundamental inline blocking properties.  */
1557
1558static bool
1559inlinable_function_p (tree fn)
1560{
1561  bool inlinable = true;
1562
1563  /* If we've already decided this function shouldn't be inlined,
1564     there's no need to check again.  */
1565  if (DECL_UNINLINABLE (fn))
1566    return false;
1567
1568  /* See if there is any language-specific reason it cannot be
1569     inlined.  (It is important that this hook be called early because
1570     in C++ it may result in template instantiation.)
1571     If the function is not inlinable for language-specific reasons,
1572     it is left up to the langhook to explain why.  */
1573  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1574
1575  /* If we don't have the function body available, we can't inline it.
1576     However, this should not be recorded since we also get here for
1577     forward declared inline functions.  Therefore, return at once.  */
1578  if (!DECL_SAVED_TREE (fn))
1579    return false;
1580
1581  /* If we're not inlining at all, then we cannot inline this function.  */
1582  else if (!flag_inline_trees)
1583    inlinable = false;
1584
1585  /* Only try to inline functions if DECL_INLINE is set.  This should be
1586     true for all functions declared `inline', and for all other functions
1587     as well with -finline-functions.
1588
1589     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1590     it's the front-end that must set DECL_INLINE in this case, because
1591     dwarf2out loses if a function that does not have DECL_INLINE set is
1592     inlined anyway.  That is why we have both DECL_INLINE and
1593     DECL_DECLARED_INLINE_P.  */
1594  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1595	    here should be redundant.  */
1596  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1597    inlinable = false;
1598
1599  else if (inline_forbidden_p (fn))
1600    {
1601      /* See if we should warn about uninlinable functions.  Previously,
1602	 some of these warnings would be issued while trying to expand
1603	 the function inline, but that would cause multiple warnings
1604	 about functions that would for example call alloca.  But since
1605	 this a property of the function, just one warning is enough.
1606	 As a bonus we can now give more details about the reason why a
1607	 function is not inlinable.
1608	 We only warn for functions declared `inline' by the user.  */
1609      bool do_warning = (warn_inline
1610			 && DECL_INLINE (fn)
1611			 && DECL_DECLARED_INLINE_P (fn)
1612			 && !DECL_IN_SYSTEM_HEADER (fn));
1613
1614      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1615	sorry (inline_forbidden_reason, fn);
1616      else if (do_warning)
1617	warning (OPT_Winline, inline_forbidden_reason, fn);
1618
1619      inlinable = false;
1620    }
1621
1622  /* Squirrel away the result so that we don't have to check again.  */
1623  DECL_UNINLINABLE (fn) = !inlinable;
1624
1625  return inlinable;
1626}
1627
1628/* Estimate the cost of a memory move.  Use machine dependent
1629   word size and take possible memcpy call into account.  */
1630
1631int
1632estimate_move_cost (tree type)
1633{
1634  HOST_WIDE_INT size;
1635
1636  size = int_size_in_bytes (type);
1637
1638  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1639    /* Cost of a memcpy call, 3 arguments and the call.  */
1640    return 4;
1641  else
1642    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1643}
1644
1645/* Used by estimate_num_insns.  Estimate number of instructions seen
1646   by given statement.  */
1647
1648static tree
1649estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1650{
1651  int *count = data;
1652  tree x = *tp;
1653
1654  if (IS_TYPE_OR_DECL_P (x))
1655    {
1656      *walk_subtrees = 0;
1657      return NULL;
1658    }
1659  /* Assume that constants and references counts nothing.  These should
1660     be majorized by amount of operations among them we count later
1661     and are common target of CSE and similar optimizations.  */
1662  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1663    return NULL;
1664
1665  switch (TREE_CODE (x))
1666    {
1667    /* Containers have no cost.  */
1668    case TREE_LIST:
1669    case TREE_VEC:
1670    case BLOCK:
1671    case COMPONENT_REF:
1672    case BIT_FIELD_REF:
1673    case INDIRECT_REF:
1674    case ALIGN_INDIRECT_REF:
1675    case MISALIGNED_INDIRECT_REF:
1676    case ARRAY_REF:
1677    case ARRAY_RANGE_REF:
1678    case OBJ_TYPE_REF:
1679    case EXC_PTR_EXPR: /* ??? */
1680    case FILTER_EXPR: /* ??? */
1681    case COMPOUND_EXPR:
1682    case BIND_EXPR:
1683    case WITH_CLEANUP_EXPR:
1684    case NOP_EXPR:
1685    case VIEW_CONVERT_EXPR:
1686    case SAVE_EXPR:
1687    case ADDR_EXPR:
1688    case COMPLEX_EXPR:
1689    case RANGE_EXPR:
1690    case CASE_LABEL_EXPR:
1691    case SSA_NAME:
1692    case CATCH_EXPR:
1693    case EH_FILTER_EXPR:
1694    case STATEMENT_LIST:
1695    case ERROR_MARK:
1696    case NON_LVALUE_EXPR:
1697    case FDESC_EXPR:
1698    case VA_ARG_EXPR:
1699    case TRY_CATCH_EXPR:
1700    case TRY_FINALLY_EXPR:
1701    case LABEL_EXPR:
1702    case GOTO_EXPR:
1703    case RETURN_EXPR:
1704    case EXIT_EXPR:
1705    case LOOP_EXPR:
1706    case PHI_NODE:
1707    case WITH_SIZE_EXPR:
1708      break;
1709
1710    /* We don't account constants for now.  Assume that the cost is amortized
1711       by operations that do use them.  We may re-consider this decision once
1712       we are able to optimize the tree before estimating its size and break
1713       out static initializers.  */
1714    case IDENTIFIER_NODE:
1715    case INTEGER_CST:
1716    case REAL_CST:
1717    case COMPLEX_CST:
1718    case VECTOR_CST:
1719    case STRING_CST:
1720      *walk_subtrees = 0;
1721      return NULL;
1722
1723    /* Try to estimate the cost of assignments.  We have three cases to
1724       deal with:
1725	1) Simple assignments to registers;
1726	2) Stores to things that must live in memory.  This includes
1727	   "normal" stores to scalars, but also assignments of large
1728	   structures, or constructors of big arrays;
1729	3) TARGET_EXPRs.
1730
1731       Let us look at the first two cases, assuming we have "a = b + C":
1732       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1733       If "a" is a GIMPLE register, the assignment to it is free on almost
1734       any target, because "a" usually ends up in a real register.  Hence
1735       the only cost of this expression comes from the PLUS_EXPR, and we
1736       can ignore the MODIFY_EXPR.
1737       If "a" is not a GIMPLE register, the assignment to "a" will most
1738       likely be a real store, so the cost of the MODIFY_EXPR is the cost
1739       of moving something into "a", which we compute using the function
1740       estimate_move_cost.
1741
1742       The third case deals with TARGET_EXPRs, for which the semantics are
1743       that a temporary is assigned, unless the TARGET_EXPR itself is being
1744       assigned to something else.  In the latter case we do not need the
1745       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1746       MODIFY_EXPR is free.  */
1747    case INIT_EXPR:
1748    case MODIFY_EXPR:
1749      /* Is the right and side a TARGET_EXPR?  */
1750      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
1751	break;
1752      /* ... fall through ...  */
1753
1754    case TARGET_EXPR:
1755      x = TREE_OPERAND (x, 0);
1756      /* Is this an assignments to a register?  */
1757      if (is_gimple_reg (x))
1758	break;
1759      /* Otherwise it's a store, so fall through to compute the move cost.  */
1760
1761    case CONSTRUCTOR:
1762      *count += estimate_move_cost (TREE_TYPE (x));
1763      break;
1764
1765    /* Assign cost of 1 to usual operations.
1766       ??? We may consider mapping RTL costs to this.  */
1767    case COND_EXPR:
1768    case VEC_COND_EXPR:
1769
1770    case PLUS_EXPR:
1771    case MINUS_EXPR:
1772    case MULT_EXPR:
1773
1774    case FIX_TRUNC_EXPR:
1775    case FIX_CEIL_EXPR:
1776    case FIX_FLOOR_EXPR:
1777    case FIX_ROUND_EXPR:
1778
1779    case NEGATE_EXPR:
1780    case FLOAT_EXPR:
1781    case MIN_EXPR:
1782    case MAX_EXPR:
1783    case ABS_EXPR:
1784
1785    case LSHIFT_EXPR:
1786    case RSHIFT_EXPR:
1787    case LROTATE_EXPR:
1788    case RROTATE_EXPR:
1789    case VEC_LSHIFT_EXPR:
1790    case VEC_RSHIFT_EXPR:
1791
1792    case BIT_IOR_EXPR:
1793    case BIT_XOR_EXPR:
1794    case BIT_AND_EXPR:
1795    case BIT_NOT_EXPR:
1796
1797    case TRUTH_ANDIF_EXPR:
1798    case TRUTH_ORIF_EXPR:
1799    case TRUTH_AND_EXPR:
1800    case TRUTH_OR_EXPR:
1801    case TRUTH_XOR_EXPR:
1802    case TRUTH_NOT_EXPR:
1803
1804    case LT_EXPR:
1805    case LE_EXPR:
1806    case GT_EXPR:
1807    case GE_EXPR:
1808    case EQ_EXPR:
1809    case NE_EXPR:
1810    case ORDERED_EXPR:
1811    case UNORDERED_EXPR:
1812
1813    case UNLT_EXPR:
1814    case UNLE_EXPR:
1815    case UNGT_EXPR:
1816    case UNGE_EXPR:
1817    case UNEQ_EXPR:
1818    case LTGT_EXPR:
1819
1820    case CONVERT_EXPR:
1821
1822    case CONJ_EXPR:
1823
1824    case PREDECREMENT_EXPR:
1825    case PREINCREMENT_EXPR:
1826    case POSTDECREMENT_EXPR:
1827    case POSTINCREMENT_EXPR:
1828
1829    case SWITCH_EXPR:
1830
1831    case ASM_EXPR:
1832
1833    case REALIGN_LOAD_EXPR:
1834
1835    case REDUC_MAX_EXPR:
1836    case REDUC_MIN_EXPR:
1837    case REDUC_PLUS_EXPR:
1838
1839    case RESX_EXPR:
1840      *count += 1;
1841      break;
1842
1843    /* Few special cases of expensive operations.  This is useful
1844       to avoid inlining on functions having too many of these.  */
1845    case TRUNC_DIV_EXPR:
1846    case CEIL_DIV_EXPR:
1847    case FLOOR_DIV_EXPR:
1848    case ROUND_DIV_EXPR:
1849    case EXACT_DIV_EXPR:
1850    case TRUNC_MOD_EXPR:
1851    case CEIL_MOD_EXPR:
1852    case FLOOR_MOD_EXPR:
1853    case ROUND_MOD_EXPR:
1854    case RDIV_EXPR:
1855      *count += 10;
1856      break;
1857    case CALL_EXPR:
1858      {
1859	tree decl = get_callee_fndecl (x);
1860	tree arg;
1861
1862	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1863	  switch (DECL_FUNCTION_CODE (decl))
1864	    {
1865	    case BUILT_IN_CONSTANT_P:
1866	      *walk_subtrees = 0;
1867	      return NULL_TREE;
1868	    case BUILT_IN_EXPECT:
1869	      return NULL_TREE;
1870	    default:
1871	      break;
1872	    }
1873
1874	/* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
1875	   that does use function declaration to figure out the arguments.  */
1876	if (!decl)
1877	  {
1878	    for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
1879	      *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
1880	  }
1881	else
1882	  {
1883	    for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1884	      *count += estimate_move_cost (TREE_TYPE (arg));
1885	  }
1886
1887	*count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
1888	break;
1889      }
1890    default:
1891      gcc_unreachable ();
1892    }
1893  return NULL;
1894}
1895
1896/* Estimate number of instructions that will be created by expanding EXPR.  */
1897
1898int
1899estimate_num_insns (tree expr)
1900{
1901  int num = 0;
1902  struct pointer_set_t *visited_nodes;
1903  basic_block bb;
1904  block_stmt_iterator bsi;
1905  struct function *my_function;
1906
1907  /* If we're given an entire function, walk the CFG.  */
1908  if (TREE_CODE (expr) == FUNCTION_DECL)
1909    {
1910      my_function = DECL_STRUCT_FUNCTION (expr);
1911      gcc_assert (my_function && my_function->cfg);
1912      visited_nodes = pointer_set_create ();
1913      FOR_EACH_BB_FN (bb, my_function)
1914	{
1915	  for (bsi = bsi_start (bb);
1916	       !bsi_end_p (bsi);
1917	       bsi_next (&bsi))
1918	    {
1919	      walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
1920			 &num, visited_nodes);
1921	    }
1922	}
1923      pointer_set_destroy (visited_nodes);
1924    }
1925  else
1926    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1927
1928  return num;
1929}
1930
1931typedef struct function *function_p;
1932
1933DEF_VEC_P(function_p);
1934DEF_VEC_ALLOC_P(function_p,heap);
1935
1936/* Initialized with NOGC, making this poisonous to the garbage collector.  */
1937static VEC(function_p,heap) *cfun_stack;
1938
1939void
1940push_cfun (struct function *new_cfun)
1941{
1942  VEC_safe_push (function_p, heap, cfun_stack, cfun);
1943  cfun = new_cfun;
1944}
1945
1946void
1947pop_cfun (void)
1948{
1949  cfun = VEC_pop (function_p, cfun_stack);
1950}
1951
1952/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
1953static void
1954add_lexical_block (tree current_block, tree new_block)
1955{
1956  tree *blk_p;
1957
1958  /* Walk to the last sub-block.  */
1959  for (blk_p = &BLOCK_SUBBLOCKS (current_block);
1960       *blk_p;
1961       blk_p = &TREE_CHAIN (*blk_p))
1962    ;
1963  *blk_p = new_block;
1964  BLOCK_SUPERCONTEXT (new_block) = current_block;
1965}
1966
1967/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */
1968
1969static bool
1970expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
1971{
1972  inline_data *id;
1973  tree t;
1974  tree use_retvar;
1975  tree fn;
1976  splay_tree st;
1977  tree args;
1978  tree return_slot_addr;
1979  tree modify_dest;
1980  location_t saved_location;
1981  struct cgraph_edge *cg_edge;
1982  const char *reason;
1983  basic_block return_block;
1984  edge e;
1985  block_stmt_iterator bsi, stmt_bsi;
1986  bool successfully_inlined = FALSE;
1987  tree t_step;
1988  tree var;
1989  struct cgraph_node *old_node;
1990  tree decl;
1991
1992  /* See what we've got.  */
1993  id = (inline_data *) data;
1994  t = *tp;
1995
1996  /* Set input_location here so we get the right instantiation context
1997     if we call instantiate_decl from inlinable_function_p.  */
1998  saved_location = input_location;
1999  if (EXPR_HAS_LOCATION (t))
2000    input_location = EXPR_LOCATION (t);
2001
2002  /* From here on, we're only interested in CALL_EXPRs.  */
2003  if (TREE_CODE (t) != CALL_EXPR)
2004    goto egress;
2005
2006  /* First, see if we can figure out what function is being called.
2007     If we cannot, then there is no hope of inlining the function.  */
2008  fn = get_callee_fndecl (t);
2009  if (!fn)
2010    goto egress;
2011
2012  /* Turn forward declarations into real ones.  */
2013  fn = cgraph_node (fn)->decl;
2014
2015  /* If fn is a declaration of a function in a nested scope that was
2016     globally declared inline, we don't set its DECL_INITIAL.
2017     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2018     C++ front-end uses it for cdtors to refer to their internal
2019     declarations, that are not real functions.  Fortunately those
2020     don't have trees to be saved, so we can tell by checking their
2021     DECL_SAVED_TREE.  */
2022  if (! DECL_INITIAL (fn)
2023      && DECL_ABSTRACT_ORIGIN (fn)
2024      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2025    fn = DECL_ABSTRACT_ORIGIN (fn);
2026
2027  /* Objective C and fortran still calls tree_rest_of_compilation directly.
2028     Kill this check once this is fixed.  */
2029  if (!id->current_node->analyzed)
2030    goto egress;
2031
2032  cg_edge = cgraph_edge (id->current_node, stmt);
2033
2034  /* Constant propagation on argument done during previous inlining
2035     may create new direct call.  Produce an edge for it.  */
2036  if (!cg_edge)
2037    {
2038      struct cgraph_node *dest = cgraph_node (fn);
2039
2040      /* We have missing edge in the callgraph.  This can happen in one case
2041         where previous inlining turned indirect call into direct call by
2042         constant propagating arguments.  In all other cases we hit a bug
2043         (incorrect node sharing is most common reason for missing edges.  */
2044      gcc_assert (dest->needed || !flag_unit_at_a_time);
2045      cgraph_create_edge (id->node, dest, stmt,
2046			  bb->count, bb->loop_depth)->inline_failed
2047	= N_("originally indirect function call not considered for inlining");
2048      goto egress;
2049    }
2050
2051  /* Don't try to inline functions that are not well-suited to
2052     inlining.  */
2053  if (!cgraph_inline_p (cg_edge, &reason))
2054    {
2055      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2056	  /* Avoid warnings during early inline pass. */
2057	  && (!flag_unit_at_a_time || cgraph_global_info_ready))
2058	{
2059	  sorry ("inlining failed in call to %q+F: %s", fn, reason);
2060	  sorry ("called from here");
2061	}
2062      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2063	       && !DECL_IN_SYSTEM_HEADER (fn)
2064	       && strlen (reason)
2065	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2066	       /* Avoid warnings during early inline pass. */
2067	       && (!flag_unit_at_a_time || cgraph_global_info_ready))
2068	{
2069	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2070		   fn, reason);
2071	  warning (OPT_Winline, "called from here");
2072	}
2073      goto egress;
2074    }
2075
2076#ifdef ENABLE_CHECKING
2077  if (cg_edge->callee->decl != id->node->decl)
2078    verify_cgraph_node (cg_edge->callee);
2079#endif
2080
2081  /* We will be inlining this callee.  */
2082
2083  id->eh_region = lookup_stmt_eh_region (stmt);
2084
2085  /* Split the block holding the CALL_EXPR.  */
2086
2087  e = split_block (bb, stmt);
2088  bb = e->src;
2089  return_block = e->dest;
2090  remove_edge (e);
2091
2092  /* split_block splits before the statement, work around this by moving
2093     the call into the first half_bb.  Not pretty, but seems easier than
2094     doing the CFG manipulation by hand when the CALL_EXPR is in the last
2095     statement in BB.  */
2096  stmt_bsi = bsi_last (bb);
2097  bsi = bsi_start (return_block);
2098  if (!bsi_end_p (bsi))
2099    bsi_move_before (&stmt_bsi, &bsi);
2100  else
2101    {
2102      tree stmt = bsi_stmt (stmt_bsi);
2103      bsi_remove (&stmt_bsi);
2104      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2105    }
2106  stmt_bsi = bsi_start (return_block);
2107
2108  /* Build a block containing code to initialize the arguments, the
2109     actual inline expansion of the body, and a label for the return
2110     statements within the function to jump to.  The type of the
2111     statement expression is the return type of the function call.  */
2112  id->block = make_node (BLOCK);
2113  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2114  BLOCK_SOURCE_LOCATION (id->block) = input_location;
2115  add_lexical_block (TREE_BLOCK (stmt), id->block);
2116
2117  /* Local declarations will be replaced by their equivalents in this
2118     map.  */
2119  st = id->decl_map;
2120  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2121				 NULL, NULL);
2122
2123  /* Initialize the parameters.  */
2124  args = TREE_OPERAND (t, 1);
2125
2126  initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2127
2128  /* Record the function we are about to inline.  */
2129  id->callee = fn;
2130
2131  if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
2132    add_lexical_block (id->block, remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks, id));
2133  else if (DECL_INITIAL (fn))
2134    add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2135
2136  /* Return statements in the function body will be replaced by jumps
2137     to the RET_LABEL.  */
2138
2139  gcc_assert (DECL_INITIAL (fn));
2140  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2141
2142  /* Find the lhs to which the result of this call is assigned.  */
2143  return_slot_addr = NULL;
2144  if (TREE_CODE (stmt) == MODIFY_EXPR)
2145    {
2146      modify_dest = TREE_OPERAND (stmt, 0);
2147
2148      /* The function which we are inlining might not return a value,
2149	 in which case we should issue a warning that the function
2150	 does not return a value.  In that case the optimizers will
2151	 see that the variable to which the value is assigned was not
2152	 initialized.  We do not want to issue a warning about that
2153	 uninitialized variable.  */
2154      if (DECL_P (modify_dest))
2155	TREE_NO_WARNING (modify_dest) = 1;
2156      if (CALL_EXPR_RETURN_SLOT_OPT (t))
2157	{
2158	  return_slot_addr = build_fold_addr_expr (modify_dest);
2159	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2160	  modify_dest = NULL;
2161	}
2162    }
2163  else
2164    modify_dest = NULL;
2165
2166  /* Declare the return variable for the function.  */
2167  decl = declare_return_variable (id, return_slot_addr,
2168			          modify_dest, &use_retvar);
2169  /* Do this only if declare_return_variable created a new one.  */
2170  if (decl && !return_slot_addr && decl != modify_dest)
2171    declare_inline_vars (id->block, decl);
2172
2173  /* After we've initialized the parameters, we insert the body of the
2174     function itself.  */
2175  old_node = id->current_node;
2176
2177  /* Anoint the callee-to-be-duplicated as the "current_node."  When
2178     CALL_EXPRs within callee are duplicated, the edges from callee to
2179     callee's callees (caller's grandchildren) will be cloned.  */
2180  id->current_node = cg_edge->callee;
2181
2182  /* This is it.  Duplicate the callee body.  Assume callee is
2183     pre-gimplified.  Note that we must not alter the caller
2184     function in any way before this point, as this CALL_EXPR may be
2185     a self-referential call; if we're calling ourselves, we need to
2186     duplicate our body before altering anything.  */
2187  copy_body (id, bb->count, bb->frequency, bb, return_block);
2188  id->current_node = old_node;
2189
2190  /* Add local vars in this inlined callee to caller.  */
2191  t_step = id->callee_cfun->unexpanded_var_list;
2192  if (id->callee_cfun->saved_unexpanded_var_list)
2193    t_step = id->callee_cfun->saved_unexpanded_var_list;
2194  for (; t_step; t_step = TREE_CHAIN (t_step))
2195    {
2196      var = TREE_VALUE (t_step);
2197      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2198	cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2199					       cfun->unexpanded_var_list);
2200      else
2201	cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2202					       cfun->unexpanded_var_list);
2203    }
2204
2205  /* Clean up.  */
2206  splay_tree_delete (id->decl_map);
2207  id->decl_map = st;
2208
2209  /* If the inlined function returns a result that we care about,
2210     clobber the CALL_EXPR with a reference to the return variable.  */
2211  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2212    {
2213      *tp = use_retvar;
2214      maybe_clean_or_replace_eh_stmt (stmt, stmt);
2215    }
2216  else
2217    /* We're modifying a TSI owned by gimple_expand_calls_inline();
2218       tsi_delink() will leave the iterator in a sane state.  */
2219    bsi_remove (&stmt_bsi);
2220
2221  bsi_next (&bsi);
2222  if (bsi_end_p (bsi))
2223    tree_purge_dead_eh_edges (return_block);
2224
2225  /* If the value of the new expression is ignored, that's OK.  We
2226     don't warn about this for CALL_EXPRs, so we shouldn't warn about
2227     the equivalent inlined version either.  */
2228  TREE_USED (*tp) = 1;
2229
2230  /* Output the inlining info for this abstract function, since it has been
2231     inlined.  If we don't do this now, we can lose the information about the
2232     variables in the function when the blocks get blown away as soon as we
2233     remove the cgraph node.  */
2234  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2235
2236  /* Update callgraph if needed.  */
2237  cgraph_remove_node (cg_edge->callee);
2238
2239  /* Declare the 'auto' variables added with this inlined body.  */
2240  record_vars (BLOCK_VARS (id->block));
2241  id->block = NULL_TREE;
2242  successfully_inlined = TRUE;
2243
2244 egress:
2245  input_location = saved_location;
2246  return successfully_inlined;
2247}
2248
2249/* Expand call statements reachable from STMT_P.
2250   We can only have CALL_EXPRs as the "toplevel" tree code or nested
2251   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
2252   unfortunately not use that function here because we need a pointer
2253   to the CALL_EXPR, not the tree itself.  */
2254
2255static bool
2256gimple_expand_calls_inline (basic_block bb, inline_data *id)
2257{
2258  block_stmt_iterator bsi;
2259
2260  /* Register specific tree functions.  */
2261  tree_register_cfg_hooks ();
2262  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2263    {
2264      tree *expr_p = bsi_stmt_ptr (bsi);
2265      tree stmt = *expr_p;
2266
2267      if (TREE_CODE (*expr_p) == MODIFY_EXPR)
2268	expr_p = &TREE_OPERAND (*expr_p, 1);
2269      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2270	expr_p = &TREE_OPERAND (*expr_p, 0);
2271      if (TREE_CODE (*expr_p) == CALL_EXPR)
2272	if (expand_call_inline (bb, stmt, expr_p, id))
2273	  return true;
2274    }
2275  return false;
2276}
2277
2278/* Expand calls to inline functions in the body of FN.  */
2279
2280void
2281optimize_inline_calls (tree fn)
2282{
2283  inline_data id;
2284  tree prev_fn;
2285  basic_block bb;
2286  /* There is no point in performing inlining if errors have already
2287     occurred -- and we might crash if we try to inline invalid
2288     code.  */
2289  if (errorcount || sorrycount)
2290    return;
2291
2292  /* Clear out ID.  */
2293  memset (&id, 0, sizeof (id));
2294
2295  id.current_node = id.node = cgraph_node (fn);
2296  id.caller = fn;
2297  /* Or any functions that aren't finished yet.  */
2298  prev_fn = NULL_TREE;
2299  if (current_function_decl)
2300    {
2301      id.caller = current_function_decl;
2302      prev_fn = current_function_decl;
2303    }
2304  push_gimplify_context ();
2305
2306  /* Reach the trees by walking over the CFG, and note the
2307     enclosing basic-blocks in the call edges.  */
2308  /* We walk the blocks going forward, because inlined function bodies
2309     will split id->current_basic_block, and the new blocks will
2310     follow it; we'll trudge through them, processing their CALL_EXPRs
2311     along the way.  */
2312  FOR_EACH_BB (bb)
2313    gimple_expand_calls_inline (bb, &id);
2314
2315
2316  pop_gimplify_context (NULL);
2317  /* Renumber the (code) basic_blocks consecutively.  */
2318  compact_blocks ();
2319  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
2320  number_blocks (fn);
2321
2322#ifdef ENABLE_CHECKING
2323    {
2324      struct cgraph_edge *e;
2325
2326      verify_cgraph_node (id.node);
2327
2328      /* Double check that we inlined everything we are supposed to inline.  */
2329      for (e = id.node->callees; e; e = e->next_callee)
2330	gcc_assert (e->inline_failed);
2331    }
2332#endif
2333  /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
2334     as inlining loops might increase the maximum.  */
2335  if (ENTRY_BLOCK_PTR->count)
2336    counts_to_freqs ();
2337  fold_cond_expr_cond ();
2338}
2339
2340/* FN is a function that has a complete body, and CLONE is a function whose
2341   body is to be set to a copy of FN, mapping argument declarations according
2342   to the ARG_MAP splay_tree.  */
2343
2344void
2345clone_body (tree clone, tree fn, void *arg_map)
2346{
2347  inline_data id;
2348
2349  /* Clone the body, as if we were making an inline call.  But, remap the
2350     parameters in the callee to the parameters of caller.  */
2351  memset (&id, 0, sizeof (id));
2352  id.caller = clone;
2353  id.callee = fn;
2354  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2355  id.decl_map = (splay_tree)arg_map;
2356
2357  /* Cloning is treated slightly differently from inlining.  Set
2358     CLONING_P so that it's clear which operation we're performing.  */
2359  id.cloning_p = true;
2360
2361  /* We're not inside any EH region.  */
2362  id.eh_region = -1;
2363
2364  /* Actually copy the body.  */
2365  append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2366}
2367
2368/* Save duplicate body in FN.  MAP is used to pass around splay tree
2369   used to update arguments in restore_body.  */
2370
2371/* Make and return duplicate of body in FN.  Put copies of DECL_ARGUMENTS
2372   in *arg_copy and of the static chain, if any, in *sc_copy.  */
2373
2374void
2375save_body (tree fn, tree *arg_copy, tree *sc_copy)
2376{
2377  inline_data id;
2378  tree newdecl, *parg;
2379  basic_block fn_entry_block;
2380  tree t_step;
2381
2382  memset (&id, 0, sizeof (id));
2383  id.callee = fn;
2384  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2385  id.caller = fn;
2386  id.node = cgraph_node (fn);
2387  id.saving_p = true;
2388  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2389  *arg_copy = DECL_ARGUMENTS (fn);
2390
2391  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2392    {
2393      tree new = copy_node (*parg);
2394
2395      lang_hooks.dup_lang_specific_decl (new);
2396      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
2397      insert_decl_map (&id, *parg, new);
2398      TREE_CHAIN (new) = TREE_CHAIN (*parg);
2399      *parg = new;
2400    }
2401
2402  *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2403  if (*sc_copy)
2404    {
2405      tree new = copy_node (*sc_copy);
2406
2407      lang_hooks.dup_lang_specific_decl (new);
2408      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
2409      insert_decl_map (&id, *sc_copy, new);
2410      TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
2411      *sc_copy = new;
2412    }
2413
2414  /* We're not inside any EH region.  */
2415  id.eh_region = -1;
2416
2417  insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
2418
2419  DECL_STRUCT_FUNCTION (fn)->saved_blocks
2420    = remap_blocks (DECL_INITIAL (fn), &id);
2421  for (t_step = id.callee_cfun->unexpanded_var_list;
2422       t_step;
2423       t_step = TREE_CHAIN (t_step))
2424    {
2425      tree var = TREE_VALUE (t_step);
2426      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2427	cfun->saved_unexpanded_var_list
2428	  = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
2429      else
2430	cfun->saved_unexpanded_var_list
2431	  = tree_cons (NULL_TREE, remap_decl (var, &id),
2432		       cfun->saved_unexpanded_var_list);
2433    }
2434
2435  /* Actually copy the body, including a new (struct function *) and CFG.
2436     EH info is also duplicated so its labels point into the copied
2437     CFG, not the original.  */
2438  fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
2439  newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
2440		       NULL, NULL);
2441  DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
2442  DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;
2443
2444  /* Clean up.  */
2445  splay_tree_delete (id.decl_map);
2446}
2447
2448/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
2449
2450tree
2451copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2452{
2453  enum tree_code code = TREE_CODE (*tp);
2454  inline_data *id = (inline_data *) data;
2455
2456  /* We make copies of most nodes.  */
2457  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2458      || code == TREE_LIST
2459      || code == TREE_VEC
2460      || code == TYPE_DECL)
2461    {
2462      /* Because the chain gets clobbered when we make a copy, we save it
2463	 here.  */
2464      tree chain = TREE_CHAIN (*tp);
2465      tree new;
2466
2467      if (id && id->versioning_p && replace_ref_tree (id, tp))
2468	{
2469	  *walk_subtrees = 0;
2470	  return NULL_TREE;
2471	}
2472      /* Copy the node.  */
2473      new = copy_node (*tp);
2474
2475      /* Propagate mudflap marked-ness.  */
2476      if (flag_mudflap && mf_marked_p (*tp))
2477        mf_mark (new);
2478
2479      *tp = new;
2480
2481      /* Now, restore the chain, if appropriate.  That will cause
2482	 walk_tree to walk into the chain as well.  */
2483      if (code == PARM_DECL || code == TREE_LIST)
2484	TREE_CHAIN (*tp) = chain;
2485
2486      /* For now, we don't update BLOCKs when we make copies.  So, we
2487	 have to nullify all BIND_EXPRs.  */
2488      if (TREE_CODE (*tp) == BIND_EXPR)
2489	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2490    }
2491  else if (code == CONSTRUCTOR)
2492    {
2493      /* CONSTRUCTOR nodes need special handling because
2494         we need to duplicate the vector of elements.  */
2495      tree new;
2496
2497      new = copy_node (*tp);
2498
2499      /* Propagate mudflap marked-ness.  */
2500      if (flag_mudflap && mf_marked_p (*tp))
2501        mf_mark (new);
2502
2503      CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2504					 CONSTRUCTOR_ELTS (*tp));
2505      *tp = new;
2506    }
2507  else if (TREE_CODE_CLASS (code) == tcc_type)
2508    *walk_subtrees = 0;
2509  else if (TREE_CODE_CLASS (code) == tcc_declaration)
2510    *walk_subtrees = 0;
2511  else if (TREE_CODE_CLASS (code) == tcc_constant)
2512    *walk_subtrees = 0;
2513  else
2514    gcc_assert (code != STATEMENT_LIST);
2515  return NULL_TREE;
2516}
2517
2518/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
2519   information indicating to what new SAVE_EXPR this one should be mapped,
2520   use that one.  Otherwise, create a new node and enter it in ST.  FN is
2521   the function into which the copy will be placed.  */
2522
2523static void
2524remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2525{
2526  splay_tree st = (splay_tree) st_;
2527  splay_tree_node n;
2528  tree t;
2529
2530  /* See if we already encountered this SAVE_EXPR.  */
2531  n = splay_tree_lookup (st, (splay_tree_key) *tp);
2532
2533  /* If we didn't already remap this SAVE_EXPR, do so now.  */
2534  if (!n)
2535    {
2536      t = copy_node (*tp);
2537
2538      /* Remember this SAVE_EXPR.  */
2539      splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2540      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
2541      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2542    }
2543  else
2544    {
2545      /* We've already walked into this SAVE_EXPR; don't do it again.  */
2546      *walk_subtrees = 0;
2547      t = (tree) n->value;
2548    }
2549
2550  /* Replace this SAVE_EXPR with the copy.  */
2551  *tp = t;
2552}
2553
2554/* Called via walk_tree.  If *TP points to a DECL_STMT for a local label,
2555   copies the declaration and enters it in the splay_tree in DATA (which is
2556   really an `inline_data *').  */
2557
2558static tree
2559mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2560			void *data)
2561{
2562  inline_data *id = (inline_data *) data;
2563
2564  /* Don't walk into types.  */
2565  if (TYPE_P (*tp))
2566    *walk_subtrees = 0;
2567
2568  else if (TREE_CODE (*tp) == LABEL_EXPR)
2569    {
2570      tree decl = TREE_OPERAND (*tp, 0);
2571
2572      /* Copy the decl and remember the copy.  */
2573      insert_decl_map (id, decl,
2574		       copy_decl_for_dup (decl, DECL_CONTEXT (decl),
2575					  DECL_CONTEXT (decl),  /*versioning=*/false));
2576    }
2577
2578  return NULL_TREE;
2579}
2580
2581/* Perform any modifications to EXPR required when it is unsaved.  Does
2582   not recurse into EXPR's subtrees.  */
2583
2584static void
2585unsave_expr_1 (tree expr)
2586{
2587  switch (TREE_CODE (expr))
2588    {
2589    case TARGET_EXPR:
2590      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2591         It's OK for this to happen if it was part of a subtree that
2592         isn't immediately expanded, such as operand 2 of another
2593         TARGET_EXPR.  */
2594      if (TREE_OPERAND (expr, 1))
2595	break;
2596
2597      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2598      TREE_OPERAND (expr, 3) = NULL_TREE;
2599      break;
2600
2601    default:
2602      break;
2603    }
2604}
2605
2606/* Called via walk_tree when an expression is unsaved.  Using the
2607   splay_tree pointed to by ST (which is really a `splay_tree'),
2608   remaps all local declarations to appropriate replacements.  */
2609
2610static tree
2611unsave_r (tree *tp, int *walk_subtrees, void *data)
2612{
2613  inline_data *id = (inline_data *) data;
2614  splay_tree st = id->decl_map;
2615  splay_tree_node n;
2616
2617  /* Only a local declaration (variable or label).  */
2618  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2619      || TREE_CODE (*tp) == LABEL_DECL)
2620    {
2621      /* Lookup the declaration.  */
2622      n = splay_tree_lookup (st, (splay_tree_key) *tp);
2623
2624      /* If it's there, remap it.  */
2625      if (n)
2626	*tp = (tree) n->value;
2627    }
2628
2629  else if (TREE_CODE (*tp) == STATEMENT_LIST)
2630    copy_statement_list (tp);
2631  else if (TREE_CODE (*tp) == BIND_EXPR)
2632    copy_bind_expr (tp, walk_subtrees, id);
2633  else if (TREE_CODE (*tp) == SAVE_EXPR)
2634    remap_save_expr (tp, st, walk_subtrees);
2635  else
2636    {
2637      copy_tree_r (tp, walk_subtrees, NULL);
2638
2639      /* Do whatever unsaving is required.  */
2640      unsave_expr_1 (*tp);
2641    }
2642
2643  /* Keep iterating.  */
2644  return NULL_TREE;
2645}
2646
2647/* Copies everything in EXPR and replaces variables, labels
2648   and SAVE_EXPRs local to EXPR.  */
2649
2650tree
2651unsave_expr_now (tree expr)
2652{
2653  inline_data id;
2654
2655  /* There's nothing to do for NULL_TREE.  */
2656  if (expr == 0)
2657    return expr;
2658
2659  /* Set up ID.  */
2660  memset (&id, 0, sizeof (id));
2661  id.callee = current_function_decl;
2662  id.caller = current_function_decl;
2663  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2664
2665  /* Walk the tree once to find local labels.  */
2666  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2667
2668  /* Walk the tree again, copying, remapping, and unsaving.  */
2669  walk_tree (&expr, unsave_r, &id, NULL);
2670
2671  /* Clean up.  */
2672  splay_tree_delete (id.decl_map);
2673
2674  return expr;
2675}
2676
2677/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
2678
2679static tree
2680debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2681{
2682  if (*tp == data)
2683    return (tree) data;
2684  else
2685    return NULL;
2686}
2687
2688bool
2689debug_find_tree (tree top, tree search)
2690{
2691  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
2692}
2693
2694
2695/* Declare the variables created by the inliner.  Add all the variables in
2696   VARS to BIND_EXPR.  */
2697
2698static void
2699declare_inline_vars (tree block, tree vars)
2700{
2701  tree t;
2702  for (t = vars; t; t = TREE_CHAIN (t))
2703    DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2704
2705  if (block)
2706    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
2707}
2708
2709
2710/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
2711   but now it will be in the TO_FN.  VERSIONING means that this function
2712   is used by the versioning utility (not inlining or cloning).  */
2713
2714tree
2715copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
2716{
2717  tree copy;
2718
2719  gcc_assert (DECL_P (decl));
2720  /* Copy the declaration.  */
2721  if (!versioning
2722      && (TREE_CODE (decl) == PARM_DECL
2723	  || TREE_CODE (decl) == RESULT_DECL))
2724    {
2725      tree type = TREE_TYPE (decl);
2726
2727      /* For a parameter or result, we must make an equivalent VAR_DECL,
2728	 not a new PARM_DECL.  */
2729      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
2730      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
2731      TREE_READONLY (copy) = TREE_READONLY (decl);
2732      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
2733      DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
2734    }
2735  else
2736    {
2737      copy = copy_node (decl);
2738      /* The COPY is not abstract; it will be generated in TO_FN.  */
2739      DECL_ABSTRACT (copy) = 0;
2740      lang_hooks.dup_lang_specific_decl (copy);
2741
2742      /* TREE_ADDRESSABLE isn't used to indicate that a label's
2743	 address has been taken; it's for internal bookkeeping in
2744	 expand_goto_internal.  */
2745      if (TREE_CODE (copy) == LABEL_DECL)
2746	{
2747	  TREE_ADDRESSABLE (copy) = 0;
2748	  LABEL_DECL_UID (copy) = -1;
2749	}
2750    }
2751
2752  /* Don't generate debug information for the copy if we wouldn't have
2753     generated it for the copy either.  */
2754  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
2755  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
2756
2757  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
2758     declaration inspired this copy.  */
2759  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
2760
2761  /* The new variable/label has no RTL, yet.  */
2762  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
2763      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
2764    SET_DECL_RTL (copy, NULL_RTX);
2765
2766  /* These args would always appear unused, if not for this.  */
2767  TREE_USED (copy) = 1;
2768
2769  /* Set the context for the new declaration.  */
2770  if (!DECL_CONTEXT (decl))
2771    /* Globals stay global.  */
2772    ;
2773  else if (DECL_CONTEXT (decl) != from_fn)
2774    /* Things that weren't in the scope of the function we're inlining
2775       from aren't in the scope we're inlining to, either.  */
2776    ;
2777  else if (TREE_STATIC (decl))
2778    /* Function-scoped static variables should stay in the original
2779       function.  */
2780    ;
2781  else
2782    /* Ordinary automatic local variables are now in the scope of the
2783       new function.  */
2784    DECL_CONTEXT (copy) = to_fn;
2785
2786  return copy;
2787}
2788
2789/* Return a copy of the function's argument tree.  */
2790static tree
2791copy_arguments_for_versioning (tree orig_parm, inline_data * id)
2792{
2793  tree *arg_copy, *parg;
2794
2795  arg_copy = &orig_parm;
2796  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2797    {
2798      tree new = remap_decl (*parg, id);
2799      lang_hooks.dup_lang_specific_decl (new);
2800      TREE_CHAIN (new) = TREE_CHAIN (*parg);
2801      *parg = new;
2802    }
2803  return orig_parm;
2804}
2805
2806/* Return a copy of the function's static chain.  */
2807static tree
2808copy_static_chain (tree static_chain, inline_data * id)
2809{
2810  tree *chain_copy, *pvar;
2811
2812  chain_copy = &static_chain;
2813  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
2814    {
2815      tree new = remap_decl (*pvar, id);
2816      lang_hooks.dup_lang_specific_decl (new);
2817      TREE_CHAIN (new) = TREE_CHAIN (*pvar);
2818      *pvar = new;
2819    }
2820  return static_chain;
2821}
2822
2823/* Return true if the function is allowed to be versioned.
2824   This is a guard for the versioning functionality.  */
2825bool
2826tree_versionable_function_p (tree fndecl)
2827{
2828  if (fndecl == NULL_TREE)
2829    return false;
2830  /* ??? There are cases where a function is
2831     uninlinable but can be versioned.  */
2832  if (!tree_inlinable_function_p (fndecl))
2833    return false;
2834
2835  return true;
2836}
2837
2838/* Create a copy of a function's tree.
2839   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
2840   of the original function and the new copied function
2841   respectively.  In case we want to replace a DECL
2842   tree with another tree while duplicating the function's
2843   body, TREE_MAP represents the mapping between these
2844   trees.  */
2845void
2846tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map)
2847{
2848  struct cgraph_node *old_version_node;
2849  struct cgraph_node *new_version_node;
2850  inline_data id;
2851  tree p, new_fndecl;
2852  unsigned i;
2853  struct ipa_replace_map *replace_info;
2854  basic_block old_entry_block;
2855  tree t_step;
2856
2857  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
2858	      && TREE_CODE (new_decl) == FUNCTION_DECL);
2859  DECL_POSSIBLY_INLINED (old_decl) = 1;
2860
2861  old_version_node = cgraph_node (old_decl);
2862  new_version_node = cgraph_node (new_decl);
2863
2864  allocate_struct_function (new_decl);
2865  /* Cfun points to the new allocated function struct at this point.  */
2866  cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
2867
2868  DECL_ARTIFICIAL (new_decl) = 1;
2869  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
2870
2871  /* Generate a new name for the new version. */
2872  DECL_NAME (new_decl) =
2873    create_tmp_var_name (NULL);
2874  /* Create a new SYMBOL_REF rtx for the new name. */
2875  if (DECL_RTL (old_decl) != NULL)
2876    {
2877      SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
2878      XEXP (DECL_RTL (new_decl), 0) =
2879	gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
2880			    IDENTIFIER_POINTER (DECL_NAME (new_decl)));
2881    }
2882
2883  /* Prepare the data structures for the tree copy.  */
2884  memset (&id, 0, sizeof (id));
2885
2886  /* The new version. */
2887  id.node = new_version_node;
2888
2889  /* The old version. */
2890  id.current_node = cgraph_node (old_decl);
2891
2892  id.versioning_p = true;
2893  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2894  id.caller = new_decl;
2895  id.callee = old_decl;
2896  id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);
2897
2898  current_function_decl = new_decl;
2899
2900  /* Copy the function's static chain.  */
2901  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
2902  if (p)
2903    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
2904      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
2905			 &id);
2906  /* Copy the function's arguments.  */
2907  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
2908    DECL_ARGUMENTS (new_decl) =
2909      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
2910
2911  /* If there's a tree_map, prepare for substitution.  */
2912  if (tree_map)
2913    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
2914      {
2915	replace_info = VARRAY_GENERIC_PTR (tree_map, i);
2916	if (replace_info->replace_p && !replace_info->ref_p)
2917	  insert_decl_map (&id, replace_info->old_tree,
2918			   replace_info->new_tree);
2919	else if (replace_info->replace_p && replace_info->ref_p)
2920	  id.ipa_info = tree_map;
2921      }
2922
2923  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);
2924
2925  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
2926  number_blocks (id.caller);
2927
2928  if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
2929    /* Add local vars.  */
2930    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
2931	 t_step; t_step = TREE_CHAIN (t_step))
2932      {
2933	tree var = TREE_VALUE (t_step);
2934	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2935	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2936						 cfun->unexpanded_var_list);
2937	else
2938	  cfun->unexpanded_var_list =
2939	    tree_cons (NULL_TREE, remap_decl (var, &id),
2940		       cfun->unexpanded_var_list);
2941      }
2942
2943  /* Copy the Function's body.  */
2944  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
2945    (DECL_STRUCT_FUNCTION (old_decl));
2946  new_fndecl = copy_body (&id,
2947			  old_entry_block->count,
2948			  old_entry_block->frequency, NULL, NULL);
2949
2950  DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
2951
2952  DECL_STRUCT_FUNCTION (new_decl)->cfg =
2953    DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
2954  DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
2955  DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
2956    DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
2957  DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
2958    DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
2959
2960  if (DECL_RESULT (old_decl) != NULL_TREE)
2961    {
2962      tree *res_decl = &DECL_RESULT (old_decl);
2963      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
2964      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
2965    }
2966
2967  current_function_decl = NULL;
2968  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
2969  number_blocks (new_decl);
2970
2971  /* Clean up.  */
2972  splay_tree_delete (id.decl_map);
2973  fold_cond_expr_cond ();
2974  return;
2975}
2976
2977/*  Replace an INDIRECT_REF tree of a given DECL tree with a new
2978    given tree.
2979    ID->ipa_info keeps the old tree and the new tree.
2980    TP points to the INDIRECT REF tree.  Return true if
2981    the trees were replaced.  */
2982static bool
2983replace_ref_tree (inline_data * id, tree * tp)
2984{
2985  bool replaced = false;
2986  tree new;
2987
2988  if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
2989    {
2990      unsigned i;
2991
2992      for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
2993	{
2994	  struct ipa_replace_map *replace_info;
2995	  replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);
2996
2997	  if (replace_info->replace_p && replace_info->ref_p)
2998	    {
2999	      tree old_tree = replace_info->old_tree;
3000	      tree new_tree = replace_info->new_tree;
3001
3002	      if (TREE_CODE (*tp) == INDIRECT_REF
3003		  && TREE_OPERAND (*tp, 0) == old_tree)
3004		{
3005		  new = copy_node (new_tree);
3006		  *tp = new;
3007		  replaced = true;
3008		}
3009	    }
3010	}
3011    }
3012  return replaced;
3013}
3014
3015/* Return true if we are inlining.  */
3016static inline bool
3017inlining_p (inline_data * id)
3018{
3019  return (!id->saving_p && !id->cloning_p && !id->versioning_p);
3020}
3021
3022/* Duplicate a type, fields and all.  */
3023
3024tree
3025build_duplicate_type (tree type)
3026{
3027  inline_data id;
3028
3029  memset (&id, 0, sizeof (id));
3030  id.callee = current_function_decl;
3031  id.caller = current_function_decl;
3032  id.callee_cfun = cfun;
3033  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
3034
3035  type = remap_type_1 (type, &id);
3036
3037  splay_tree_delete (id.decl_map);
3038
3039  return type;
3040}
3041