tree-inline.c revision 96263
1/* Control and data flow functions for trees.
2   Copyright 2001, 2002 Free Software Foundation, Inc.
3   Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING.  If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "toplev.h"
25#include "tree.h"
26#include "tree-inline.h"
27#include "rtl.h"
28#include "expr.h"
29#include "flags.h"
30#include "params.h"
31#include "input.h"
32#include "insn-config.h"
33#include "integrate.h"
34#include "varray.h"
35#include "hashtab.h"
36#include "splay-tree.h"
37#include "langhooks.h"
38
39/* This should eventually be generalized to other languages, but
40   this would require a shared function-as-trees infrastructure.  */
41#include "c-common.h"
42
43/* 0 if we should not perform inlining.
44   1 if we should expand function calls inline at the tree level.
45   2 if we should consider *all* functions to be inline
46   candidates.  */
47
48int flag_inline_trees = 0;
49
50/* To Do:
51
52   o In order to make inlining-on-trees work, we pessimized
53     function-local static constants.  In particular, they are now
54     always output, even when not addressed.  Fix this by treating
55     function-local static constants just like global static
56     constants; the back-end already knows not to output them if they
57     are not needed.
58
59   o Provide heuristics to clamp inlining of recursive template
60     calls?  */
61
62/* Data required for function inlining.  */
63
64typedef struct inline_data
65{
66  /* A stack of the functions we are inlining.  For example, if we are
67     compiling `f', which calls `g', which calls `h', and we are
68     inlining the body of `h', the stack will contain, `h', followed
69     by `g', followed by `f'.  The first few elements of the stack may
70     contain other functions that we know we should not recurse into,
71     even though they are not directly being inlined.  */
72  varray_type fns;
73  /* The index of the first element of FNS that really represents an
74     inlined function.  */
75  unsigned first_inlined_fn;
76  /* The label to jump to when a return statement is encountered.  If
77     this value is NULL, then return statements will simply be
78     remapped as return statements, rather than as jumps.  */
79  tree ret_label;
80  /* The map from local declarations in the inlined function to
81     equivalents in the function into which it is being inlined.  */
82  splay_tree decl_map;
83  /* Nonzero if we are currently within the cleanup for a
84     TARGET_EXPR.  */
85  int in_target_cleanup_p;
86  /* A stack of the TARGET_EXPRs that we are currently processing.  */
87  varray_type target_exprs;
88  /* A list of the functions the current function has inlined.  */
89  varray_type inlined_fns;
90  /* The approximate number of statements we have inlined in the
91     current call stack.  */
92  int inlined_stmts;
93  /* We use the same mechanism to build clones that we do to perform
94     inlining.  However, there are a few places where we need to
95     distinguish between those two situations.  This flag is true if
96     we are cloning, rather than inlining.  */
97  bool cloning_p;
98  /* Hash table used to prevent walk_tree from visiting the same node
99     umpteen million times.  */
100  htab_t tree_pruner;
101} inline_data;
102
103/* Prototypes.  */
104
105static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
106static tree declare_return_variable PARAMS ((inline_data *, tree *));
107static tree copy_body_r PARAMS ((tree *, int *, void *));
108static tree copy_body PARAMS ((inline_data *));
109static tree expand_call_inline PARAMS ((tree *, int *, void *));
110static void expand_calls_inline PARAMS ((tree *, inline_data *));
111static int inlinable_function_p PARAMS ((tree, inline_data *));
112static tree remap_decl PARAMS ((tree, inline_data *));
113static void remap_block PARAMS ((tree, tree, inline_data *));
114static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
115
116/* The approximate number of instructions per statement.  This number
117   need not be particularly accurate; it is used only to make
118   decisions about when a function is too big to inline.  */
119#define INSNS_PER_STMT (10)
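
/* The resulting size estimate for a function is DECL_NUM_STMTS (fn)
   * INSNS_PER_STMT; inlinable_function_p compares this figure against
   the MAX_INLINE_INSNS budget.  */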
120
121/* Remap DECL during the copying of the BLOCK tree for the function.  */
122
123static tree
124remap_decl (decl, id)
125     tree decl;
126     inline_data *id;
127{
128  splay_tree_node n;
129  tree fn;
130
131  /* We only remap local variables in the current function.  */
132  fn = VARRAY_TOP_TREE (id->fns);
133  if (! (*lang_hooks.tree_inlining.auto_var_in_fn_p) (decl, fn))
134    return NULL_TREE;
135
136  /* See if we have remapped this declaration.  */
137  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
138  /* If we didn't already have an equivalent for this declaration,
139     create one now.  */
140  if (!n)
141    {
142      tree t;
143
144      /* Make a copy of the variable or label.  */
145      t = copy_decl_for_inlining (decl, fn,
146				  VARRAY_TREE (id->fns, 0));
147
148      /* The decl T could be a dynamic array or other variable size type,
149	 in which case some fields need to be remapped because they may
150	 contain SAVE_EXPRs.  */
151      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
152	  && TYPE_DOMAIN (TREE_TYPE (t)))
153	{
154	  TREE_TYPE (t) = copy_node (TREE_TYPE (t));
155	  TYPE_DOMAIN (TREE_TYPE (t))
156	    = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
157	  walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
158		     copy_body_r, id, NULL);
159	}
160
161      if (! DECL_NAME (t) && TREE_TYPE (t)
162	  && (*lang_hooks.tree_inlining.anon_aggr_type_p) (TREE_TYPE (t)))
163	{
164	  /* For a VAR_DECL of anonymous type, we must also copy the
165	     member VAR_DECLS here and rechain the
166	     DECL_ANON_UNION_ELEMS.  */
167	  tree members = NULL;
168	  tree src;
169
170	  for (src = DECL_ANON_UNION_ELEMS (t); src;
171	       src = TREE_CHAIN (src))
172	    {
173	      tree member = remap_decl (TREE_VALUE (src), id);
174
175	      if (TREE_PURPOSE (src))
176		abort ();
177	      members = tree_cons (NULL, member, members);
178	    }
179	  DECL_ANON_UNION_ELEMS (t) = nreverse (members);
180	}
181
182      /* Remember it, so that if we encounter this local entity
183	 again we can reuse this copy.  */
184      n = splay_tree_insert (id->decl_map,
185			     (splay_tree_key) decl,
186			     (splay_tree_value) t);
187    }
188
189  return (tree) n->value;
190}
191
192/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
193   remapped versions of the variables therein, and hook the new block
194   into the block-tree.  If non-NULL, DECLS is a chain of declarations
195   to use instead of the BLOCK_VARS in the old block.  */
196
197static void
198remap_block (scope_stmt, decls, id)
199     tree scope_stmt;
200     tree decls;
201     inline_data *id;
202{
203  /* We cannot do this in the cleanup for a TARGET_EXPR since we do
204     not know whether or not expand_expr will actually write out the
205     code we put there.  If it does not, then we'll have more BLOCKs
206     than block-notes, and things will go awry.  At some point, we
207     should make the back-end handle BLOCK notes in a tidier way,
208     without requiring a strict correspondence to the block-tree; then
209     this check can go.  */
210  if (id->in_target_cleanup_p)
211    {
212      SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
213      return;
214    }
215
216  /* If this is the beginning of a scope, remap the associated BLOCK.  */
217  if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
218    {
219      tree old_block;
220      tree new_block;
221      tree old_var;
222      tree fn;
223
224      /* Make the new block.  */
225      old_block = SCOPE_STMT_BLOCK (scope_stmt);
226      new_block = make_node (BLOCK);
227      TREE_USED (new_block) = TREE_USED (old_block);
228      BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
229      SCOPE_STMT_BLOCK (scope_stmt) = new_block;
230
231      /* Remap its variables.  */
232      for (old_var = decls ? decls : BLOCK_VARS (old_block);
233	   old_var;
234	   old_var = TREE_CHAIN (old_var))
235	{
236	  tree new_var;
237
238	  /* Remap the variable.  */
239	  new_var = remap_decl (old_var, id);
240	  /* If we didn't remap this variable, we can't mess with its
241	     TREE_CHAIN.  If we remapped this variable to
242	     something other than a declaration (say, if we mapped it
243	     to a constant), then we must similarly omit any mention
244	     of it here.  */
245	  if (!new_var || !DECL_P (new_var))
246	    ;
247	  else
248	    {
249	      TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
250	      BLOCK_VARS (new_block) = new_var;
251	    }
252	}
253      /* We put the BLOCK_VARS in reverse order; fix that now.  */
254      BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
255      fn = VARRAY_TREE (id->fns, 0);
256      if (id->cloning_p)
257	/* We're building a clone; DECL_INITIAL is still
258	   error_mark_node, and current_binding_level is the parm
259	   binding level.  */
260	insert_block (new_block);
261      else
262	{
263	  /* Attach this new block after the DECL_INITIAL block for the
264	     function into which this block is being inlined.  In
265	     rest_of_compilation we will straighten out the BLOCK tree.  */
266	  tree *first_block;
267	  if (DECL_INITIAL (fn))
268	    first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
269	  else
270	    first_block = &DECL_INITIAL (fn);
271	  BLOCK_CHAIN (new_block) = *first_block;
272	  *first_block = new_block;
273	}
274      /* Remember the remapped block.  */
275      splay_tree_insert (id->decl_map,
276			 (splay_tree_key) old_block,
277			 (splay_tree_value) new_block);
278    }
279  /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
280     remapped block.  */
281  else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
282    {
283      splay_tree_node n;
284
285      /* Find this block in the table of remapped things.  */
286      n = splay_tree_lookup (id->decl_map,
287			     (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
288      if (! n)
289	abort ();
290      SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
291    }
292}
293
294/* Copy the SCOPE_STMT pointed to by TP.  */
295
296static void
297copy_scope_stmt (tp, walk_subtrees, id)
298     tree *tp;
299     int *walk_subtrees;
300     inline_data *id;
301{
302  tree block;
303
304  /* Remember whether or not this statement was nullified.  When
305     making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
306     doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
307     deal with copying BLOCKs if they do not wish to do so.  */
308  block = SCOPE_STMT_BLOCK (*tp);
309  /* Copy (and replace) the statement.  */
310  copy_tree_r (tp, walk_subtrees, NULL);
311  /* Restore the SCOPE_STMT_BLOCK.  */
312  SCOPE_STMT_BLOCK (*tp) = block;
313
314  /* Remap the associated block.  */
315  remap_block (*tp, NULL_TREE, id);
316}
317
318/* Called from copy_body via walk_tree.  DATA is really an
319   `inline_data *'.  */
320
321static tree
322copy_body_r (tp, walk_subtrees, data)
323     tree *tp;
324     int *walk_subtrees;
325     void *data;
326{
327  inline_data* id;
328  tree fn;
329
330  /* Set up.  */
331  id = (inline_data *) data;
332  fn = VARRAY_TOP_TREE (id->fns);
333
334#if 0
335  /* All automatic variables should have a DECL_CONTEXT indicating
336     what function they come from.  */
337  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
338      && DECL_NAMESPACE_SCOPE_P (*tp))
339    if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
340      abort ();
341#endif
342
343  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
344     GOTO_STMT with the RET_LABEL as its target.  */
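  /* For example (with hypothetical names), a statement such as
     `return a + b;' in the inlined body becomes, roughly,

	<retvar> = a + b;
	goto <ret_label>;

     where <retvar> is the variable that replaces the RESULT_DECL and
     <ret_label> is ID->ret_label.  */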
345  if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
346    {
347      tree return_stmt = *tp;
348      tree goto_stmt;
349
350      /* Build the GOTO_STMT.  */
351      goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
352      TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
353      GOTO_FAKE_P (goto_stmt) = 1;
354
355      /* If we're returning something, just turn that into an
356	 assignment into the equivalent of the original
357	 RESULT_DECL.  */
358      if (RETURN_EXPR (return_stmt))
359	{
360	  *tp = build_stmt (EXPR_STMT,
361			    RETURN_EXPR (return_stmt));
362	  STMT_IS_FULL_EXPR_P (*tp) = 1;
363	  /* And then jump to the end of the function.  */
364	  TREE_CHAIN (*tp) = goto_stmt;
365	}
366      /* If we're not returning anything, just do the jump.  */
367      else
368	*tp = goto_stmt;
369    }
370  /* Local variables and labels need to be replaced by equivalent
371     variables.  We don't want to copy static variables; there's only
372     one of those, no matter how many times we inline the containing
373     function.  */
374  else if ((*lang_hooks.tree_inlining.auto_var_in_fn_p) (*tp, fn))
375    {
376      tree new_decl;
377
378      /* Remap the declaration.  */
379      new_decl = remap_decl (*tp, id);
380      if (! new_decl)
381	abort ();
382      /* Replace this variable with the copy.  */
383      STRIP_TYPE_NOPS (new_decl);
384      *tp = new_decl;
385    }
386#if 0
387  else if (nonstatic_local_decl_p (*tp)
388	   && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
389    abort ();
390#endif
391  else if (TREE_CODE (*tp) == SAVE_EXPR)
392    remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
393		     walk_subtrees);
394  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
395    /* UNSAVE_EXPRs should not be generated until expansion time.  */
396    abort ();
397  /* For a SCOPE_STMT, we must copy the associated block so that we
398     can write out debugging information for the inlined variables.  */
399  else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
400    copy_scope_stmt (tp, walk_subtrees, id);
401  /* Otherwise, just copy the node.  Note that copy_tree_r already
402     knows not to copy VAR_DECLs, etc., so this is safe.  */
403  else
404    {
405      copy_tree_r (tp, walk_subtrees, NULL);
406
407      /* The copied TARGET_EXPR has never been expanded, even if the
408	 original node was expanded already.  */
409      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
410	{
411	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
412	  TREE_OPERAND (*tp, 3) = NULL_TREE;
413	}
414      else if (TREE_CODE (*tp) == MODIFY_EXPR
415	       && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
416	       && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
417		   (TREE_OPERAND (*tp, 0), fn)))
418	{
419	  /* Some assignments VAR = VAR; don't generate any rtl code
420	     and thus don't count as variable modification.  Avoid
421	     keeping bogosities like 0 = 0.  */
422	  tree decl = TREE_OPERAND (*tp, 0), value;
423	  splay_tree_node n;
424
425	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
426	  if (n)
427	    {
428	      value = (tree) n->value;
429	      STRIP_TYPE_NOPS (value);
430	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
431		*tp = value;
432	    }
433	}
434    }
435
436  /* Keep iterating.  */
437  return NULL_TREE;
438}
439
440/* Make a copy of the body of FN so that it can be inserted inline in
441   another function.  */
442
443static tree
444copy_body (id)
445     inline_data *id;
446{
447  tree body;
448
449  body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
450  walk_tree (&body, copy_body_r, id, NULL);
451
452  return body;
453}
454
455/* Generate code to initialize the parameters of the function at the
456   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */
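
/* For example (with hypothetical names), inlining a call `f (x + 1, y)'
   where f is declared `int f (int a, int b)' produces initializations
   roughly equivalent to

	int a.1 = x + 1;
	int b.2 = y;

   using VAR_DECLs made by copy_decl_for_inlining; a read-only parameter
   bound to a simple argument may instead be entered directly into
   ID->decl_map with no statement at all.  */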
457
458static tree
459initialize_inlined_parameters (id, args, fn)
460     inline_data *id;
461     tree args;
462     tree fn;
463{
464  tree init_stmts;
465  tree parms;
466  tree a;
467  tree p;
468
469  /* Figure out what the parameters are.  */
470  parms = DECL_ARGUMENTS (fn);
471
472  /* Start with no initializations whatsoever.  */
473  init_stmts = NULL_TREE;
474
475  /* Loop through the parameter declarations, replacing each with an
476     equivalent VAR_DECL, appropriately initialized.  */
477  for (p = parms, a = args; p;
478       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
479    {
480      tree init_stmt;
481      tree var;
482      tree value;
483      tree cleanup;
484
485      /* Find the initializer.  */
486      value = (*lang_hooks.tree_inlining.convert_parm_for_inlining)
487	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn);
488
489      /* If the parameter is never assigned to, we may not need to
490	 create a new variable here at all.  Instead, we may be able
491	 to just use the argument value.  */
492      if (TREE_READONLY (p)
493	  && !TREE_ADDRESSABLE (p)
494	  && value && !TREE_SIDE_EFFECTS (value))
495	{
496	  /* Simplify the value, if possible.  */
497	  value = fold (DECL_P (value) ? decl_constant_value (value) : value);
498
499	  /* We can't risk substituting complex expressions.  They
500	     might contain variables that will be assigned to later.
501	     Theoretically, we could check the expression to see if
502	     all of the variables that determine its value are
503	     read-only, but we don't bother.  */
504	  if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
505	    {
506	      /* If this is a declaration, wrap it in a NOP_EXPR so that
507		 we don't try to put the VALUE on the list of
508		 BLOCK_VARS.  */
509	      if (DECL_P (value))
510		value = build1 (NOP_EXPR, TREE_TYPE (value), value);
511
512	      splay_tree_insert (id->decl_map,
513				 (splay_tree_key) p,
514				 (splay_tree_value) value);
515	      continue;
516	    }
517	}
518
519      /* Make an equivalent VAR_DECL.  */
520      var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
521      /* Register the VAR_DECL as the equivalent for the PARM_DECL;
522	 that way, when the PARM_DECL is encountered, it will be
523	 automatically replaced by the VAR_DECL.  */
524      splay_tree_insert (id->decl_map,
525			 (splay_tree_key) p,
526			 (splay_tree_value) var);
527
528      /* Declare this new variable.  */
529      init_stmt = build_stmt (DECL_STMT, var);
530      TREE_CHAIN (init_stmt) = init_stmts;
531      init_stmts = init_stmt;
532
533      /* Initialize this VAR_DECL from the equivalent argument.  If
534	 the argument is an object, created via a constructor or copy,
535	 this will not result in an extra copy: the TARGET_EXPR
536	 representing the argument will be bound to VAR, and the
537	 object will be constructed in VAR.  */
538      if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
539	DECL_INITIAL (var) = value;
540      else
541	{
542	  /* Even if P was TREE_READONLY, the new VAR should not be.
543	     In the original code, we would have constructed a
544	     temporary, and then the function body would have never
545	     changed the value of P.  However, now, we will be
546	     constructing VAR directly.  The constructor body may
547	     change its value multiple times as it is being
548	     constructed.  Therefore, it must not be TREE_READONLY;
549	     the back-end assumes that a TREE_READONLY variable is
550	     assigned to only once.  */
551	  TREE_READONLY (var) = 0;
552
553	  /* Build a run-time initialization.  */
554	  init_stmt = build_stmt (EXPR_STMT,
555				  build (INIT_EXPR, TREE_TYPE (p),
556					 var, value));
557	  /* Add this initialization to the list.  Note that we want the
558	     declaration *after* the initialization because we are going
559	     to reverse all the initialization statements below.  */
560	  TREE_CHAIN (init_stmt) = init_stmts;
561	  init_stmts = init_stmt;
562	}
563
564      /* See if we need to clean up the declaration.  */
565      cleanup = maybe_build_cleanup (var);
566      if (cleanup)
567	{
568	  tree cleanup_stmt;
569	  /* Build the cleanup statement.  */
570	  cleanup_stmt = build_stmt (CLEANUP_STMT, var, cleanup);
571	  /* Add it to the *front* of the list; the list will be
572	     reversed below.  */
573	  TREE_CHAIN (cleanup_stmt) = init_stmts;
574	  init_stmts = cleanup_stmt;
575	}
576    }
577
578  /* Evaluate trailing arguments.  */
579  for (; a; a = TREE_CHAIN (a))
580    {
581      tree init_stmt;
582      tree value = TREE_VALUE (a);
583
584      if (! value || ! TREE_SIDE_EFFECTS (value))
585	continue;
586
587      init_stmt = build_stmt (EXPR_STMT, value);
588      TREE_CHAIN (init_stmt) = init_stmts;
589      init_stmts = init_stmt;
590    }
591
592  /* The initialization statements have been built up in reverse
593     order.  Straighten them out now.  */
594  return nreverse (init_stmts);
595}
596
597/* Declare a return variable to replace the RESULT_DECL for the
598   function we are calling.  An appropriate DECL_STMT is returned.
599   The USE_STMT is filled in to contain a use of the declaration to
600   indicate the return value of the function.  */
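
/* For example, for an inlined function returning `int', this creates a
   local <retvar>, maps the function's RESULT_DECL to it in ID->decl_map,
   sets *USE_STMT to an EXPR_STMT mentioning <retvar>, and returns a
   DECL_STMT declaring it.  When the value lives in the caller's frame
   (an aggregate return, say), NEED_RETURN_DECL is cleared and NULL_TREE
   is returned instead.  */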
601
602static tree
603declare_return_variable (id, use_stmt)
604     struct inline_data *id;
605     tree *use_stmt;
606{
607  tree fn = VARRAY_TOP_TREE (id->fns);
608  tree result = DECL_RESULT (fn);
609  tree var;
610  int need_return_decl = 1;
611
612  /* We don't need to do anything for functions that don't return
613     anything.  */
614  if (!result || VOID_TYPE_P (TREE_TYPE (result)))
615    {
616      *use_stmt = NULL_TREE;
617      return NULL_TREE;
618    }
619
620  var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
621	 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
622	  &need_return_decl, &id->target_exprs));
623
624  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
625     way, when the RESULT_DECL is encountered, it will be
626     automatically replaced by the VAR_DECL.  */
627  splay_tree_insert (id->decl_map,
628		     (splay_tree_key) result,
629		     (splay_tree_value) var);
630
631  /* Build the USE_STMT.  If the return type of the function was
632     promoted, convert it back to the expected type.  */
633  if (TREE_TYPE (var) == TREE_TYPE (TREE_TYPE (fn)))
634    *use_stmt = build_stmt (EXPR_STMT, var);
635  else
636    *use_stmt = build_stmt (EXPR_STMT,
637			    build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)),
638				    var));
639
640  TREE_ADDRESSABLE (*use_stmt) = 1;
641
642  /* Build the declaration statement if FN does not return an
643     aggregate.  */
644  if (need_return_decl)
645    return build_stmt (DECL_STMT, var);
646  /* If FN does return an aggregate, there's no need to declare the
647     return variable; we're using a variable in our caller's frame.  */
648  else
649    return NULL_TREE;
650}
651
652/* Returns non-zero if a function can be inlined as a tree.  */
653
654int
655tree_inlinable_function_p (fn)
656     tree fn;
657{
658  return inlinable_function_p (fn, NULL);
659}
660
661/* Returns non-zero if FN is a function that can be inlined into the
662   inlining context ID.  If ID is NULL, check whether the function
663   can be inlined at all.  */
664
665static int
666inlinable_function_p (fn, id)
667     tree fn;
668     inline_data *id;
669{
670  int inlinable;
671
672  /* If we've already decided this function shouldn't be inlined,
673     there's no need to check again.  */
674  if (DECL_UNINLINABLE (fn))
675    return 0;
676
677  /* Assume it is not inlinable.  */
678  inlinable = 0;
679
680  /* If we're not inlining things, then nothing is inlinable.  */
681  if (! flag_inline_trees)
682    ;
683  /* If we're not inlining all functions and the function was not
684     declared `inline', we don't inline it.  Don't think of
685     disregarding DECL_INLINE when flag_inline_trees == 2; it's the
686     front-end that must set DECL_INLINE in this case, because
687     dwarf2out loses if a function is inlined that doesn't have
688     DECL_INLINE set.  */
689  else if (! DECL_INLINE (fn))
690    ;
691  /* We can't inline functions that are too big.  Only allow a single
692     function to eat up half of our budget.  Make special allowance
693     for extern inline functions, though.  */
694  else if (! (*lang_hooks.tree_inlining.disregard_inline_limits) (fn)
695	   && DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS / 2)
696    ;
697  /* All is well.  We can inline this function.  Traditionally, GCC
698     has refused to inline functions using alloca, or functions whose
699     values are returned in a PARALLEL, and a few other such obscure
700     conditions.  We are not equally constrained at the tree level.  */
701  else
702    inlinable = 1;
703
704  /* Squirrel away the result so that we don't have to check again.  */
705  DECL_UNINLINABLE (fn) = ! inlinable;
706
707  /* Even if this function is not itself too big to inline, it might
708     be that we've already done so much inlining that we don't want
709     to risk inlining much more; in that case, we halve the
710     acceptable size.  */
711  if (! (*lang_hooks.tree_inlining.disregard_inline_limits) (fn)
712      && ((DECL_NUM_STMTS (fn) + (id ? id->inlined_stmts : 0)) * INSNS_PER_STMT
713	  > MAX_INLINE_INSNS)
714      && DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS / 4)
715    inlinable = 0;
716
717  if (inlinable && (*lang_hooks.tree_inlining.cannot_inline_tree_fn) (&fn))
718    inlinable = 0;
719
720  /* If we don't have the function body available, we can't inline
721     it.  */
722  if (! DECL_SAVED_TREE (fn))
723    inlinable = 0;
724
725  /* Check again, language hooks may have modified it.  */
726  if (! inlinable || DECL_UNINLINABLE (fn))
727    return 0;
728
729  /* Don't do recursive inlining, either.  We don't record this in
730     DECL_UNINLINABLE; we may be able to inline this function later.  */
731  if (id)
732    {
733      size_t i;
734
735      for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
736	if (VARRAY_TREE (id->fns, i) == fn)
737	  return 0;
738
739      if (DECL_INLINED_FNS (fn))
740	{
741	  int j;
742	  tree inlined_fns = DECL_INLINED_FNS (fn);
743
744	  for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
745	    if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
746	      return 0;
747	}
748    }
749
750  /* Return the result.  */
751  return inlinable;
752}
753
754/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */
755
756static tree
757expand_call_inline (tp, walk_subtrees, data)
758     tree *tp;
759     int *walk_subtrees;
760     void *data;
761{
762  inline_data *id;
763  tree t;
764  tree expr;
765  tree chain;
766  tree fn;
767  tree scope_stmt;
768  tree use_stmt;
769  tree arg_inits;
770  tree *inlined_body;
771  splay_tree st;
772
773  /* See what we've got.  */
774  id = (inline_data *) data;
775  t = *tp;
776
777  /* Recurse, but let recursive invocations know that we are
778     inside the body of a TARGET_EXPR.  */
779  if (TREE_CODE (*tp) == TARGET_EXPR)
780    {
781      int i, len = first_rtl_op (TARGET_EXPR);
782
783      /* We're walking our own subtrees.  */
784      *walk_subtrees = 0;
785
786      /* Push *TP on the stack of pending TARGET_EXPRs.  */
787      VARRAY_PUSH_TREE (id->target_exprs, *tp);
788
789      /* Actually walk over them.  This loop is the body of
790	 walk_tree, omitting the case where the TARGET_EXPR
791	 itself is handled.  */
792      for (i = 0; i < len; ++i)
793	{
794	  if (i == 2)
795	    ++id->in_target_cleanup_p;
796	  walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
797		     id->tree_pruner);
798	  if (i == 2)
799	    --id->in_target_cleanup_p;
800	}
801
802      /* We're done with this TARGET_EXPR now.  */
803      VARRAY_POP (id->target_exprs);
804
805      return NULL_TREE;
806    }
807
808  if (TYPE_P (t))
809    /* Because types were not copied in copy_body, CALL_EXPRs beneath
810       them should not be expanded.  This can happen if the type is a
811       dynamic array type, for example.  */
812    *walk_subtrees = 0;
813
814  /* From here on, we're only interested in CALL_EXPRs.  */
815  if (TREE_CODE (t) != CALL_EXPR)
816    return NULL_TREE;
817
818  /* First, see if we can figure out what function is being called.
819     If we cannot, then there is no hope of inlining the function.  */
820  fn = get_callee_fndecl (t);
821  if (!fn)
822    return NULL_TREE;
823
824  /* If fn is a declaration of a function in a nested scope that was
825     globally declared inline, we don't set its DECL_INITIAL.
826     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
827     C++ front-end uses it for cdtors to refer to their internal
828     declarations, which are not real functions.  Fortunately those
829     don't have trees to be saved, so we can tell by checking their
830     DECL_SAVED_TREE.  */
831  if (! DECL_INITIAL (fn)
832      && DECL_ABSTRACT_ORIGIN (fn)
833      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
834    fn = DECL_ABSTRACT_ORIGIN (fn);
835
836  /* Don't try to inline functions that are not well-suited to
837     inlining.  */
838  if (!inlinable_function_p (fn, id))
839    return NULL_TREE;
840
841  if (! (*lang_hooks.tree_inlining.start_inlining) (fn))
842    return NULL_TREE;
843
844  /* Set the current filename and line number to the function we are
845     inlining so that when we create new _STMT nodes here they get
846     line numbers corresponding to the function we are calling.  We
847     wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
848     because individual statements don't record the filename.  */
849  push_srcloc (fn->decl.filename, fn->decl.linenum);
850
851  /* Build a statement-expression containing code to initialize the
852     arguments, the actual inline expansion of the body, and a label
853     for the return statements within the function to jump to.  The
854     type of the statement expression is the return type of the
855     function call.  */
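  /* Roughly, the statement-expression assembled below has the shape

	({ <argument initializations>;
	   <scope for the parameters>;
	   <declaration of the return variable>;
	   <copy of FN's body, with returns turned into gotos>;
	   <ret_label>:;
	   <use of the return variable>; })

     (illustrative layout only; the pieces are chained together in the
     code that follows).  */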
856  expr = build1 (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
857  /* There is no scope associated with the statement-expression.  */
858  STMT_EXPR_NO_SCOPE (expr) = 1;
859
860  /* Local declarations will be replaced by their equivalents in this
861     map.  */
862  st = id->decl_map;
863  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
864				 NULL, NULL);
865
866  /* Initialize the parameters.  */
867  arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
868  /* Expand any inlined calls in the initializers.  Do this before we
869     push FN on the stack of functions we are inlining; we want to
870     inline calls to FN that appear in the initializers for the
871     parameters.  */
872  expand_calls_inline (&arg_inits, id);
873  /* And add them to the tree.  */
874  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
875
876  /* Record the function we are about to inline so that we can avoid
877     recursing into it.  */
878  VARRAY_PUSH_TREE (id->fns, fn);
879
880  /* Record the function we are about to inline if optimize_inline_calls
881     has not been called on it yet and we don't have it in the list.  */
882  if (! DECL_INLINED_FNS (fn))
883    {
884      int i;
885
886      for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
887	if (VARRAY_TREE (id->inlined_fns, i) == fn)
888	  break;
889      if (i < 0)
890	VARRAY_PUSH_TREE (id->inlined_fns, fn);
891    }
892
893  /* Return statements in the function body will be replaced by jumps
894     to the RET_LABEL.  */
895  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
896  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
897
898  if (! DECL_INITIAL (fn)
899      || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
900    abort ();
901
902  /* Create a block to put the parameters in.  We have to do this
903     after the parameters have been remapped because remapping
904     parameters is different from remapping ordinary variables.  */
905  scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
906  SCOPE_BEGIN_P (scope_stmt) = 1;
907  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
908  remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
909  TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
910  STMT_EXPR_STMT (expr) = scope_stmt;
911
912  /* Tell the debugging backends that this block represents the
913     outermost scope of the inlined function.  */
914  if (SCOPE_STMT_BLOCK (scope_stmt))
915    BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
916
917  /* Declare the return variable for the function.  */
918  STMT_EXPR_STMT (expr)
919    = chainon (STMT_EXPR_STMT (expr),
920	       declare_return_variable (id, &use_stmt));
921
922  /* After we've initialized the parameters, we insert the body of the
923     function itself.  */
924  inlined_body = &STMT_EXPR_STMT (expr);
925  while (*inlined_body)
926    inlined_body = &TREE_CHAIN (*inlined_body);
927  *inlined_body = copy_body (id);
928
929  /* Close the block for the parameters.  */
930  scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
931  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
932  remap_block (scope_stmt, NULL_TREE, id);
933  STMT_EXPR_STMT (expr)
934    = chainon (STMT_EXPR_STMT (expr), scope_stmt);
935
936  /* After the body of the function comes the RET_LABEL.  This must come
937     before we evaluate the returned value below, because that evaluation
938     may cause RTL to be generated.  */
939  STMT_EXPR_STMT (expr)
940    = chainon (STMT_EXPR_STMT (expr),
941	       build_stmt (LABEL_STMT, id->ret_label));
942
943  /* Finally, mention the returned value so that the value of the
944     statement-expression is the returned value of the function.  */
945  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
946
947  /* Clean up.  */
948  splay_tree_delete (id->decl_map);
949  id->decl_map = st;
950
951  /* The new expression has side-effects if the old one did.  */
952  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
953
954  /* Replace the call by the inlined body.  Wrap it in an
955     EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
956     pointing to the right place.  */
957  chain = TREE_CHAIN (*tp);
958  *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
959			/*col=*/0);
960  EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
961  TREE_CHAIN (*tp) = chain;
962  pop_srcloc ();
963
964  /* If the value of the new expression is ignored, that's OK.  We
965     don't warn about this for CALL_EXPRs, so we shouldn't warn about
966     the equivalent inlined version either.  */
967  TREE_USED (*tp) = 1;
968
969  /* Our function now has more statements than it did before.  */
970  DECL_NUM_STMTS (VARRAY_TREE (id->fns, 0)) += DECL_NUM_STMTS (fn);
971  id->inlined_stmts += DECL_NUM_STMTS (fn);
972
973  /* Recurse into the body of the just inlined function.  */
974  expand_calls_inline (inlined_body, id);
975  VARRAY_POP (id->fns);
976
977  /* If we've returned to the top level, clear out the record of how
978     much inlining has been done.  */
979  if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
980    id->inlined_stmts = 0;
981
982  /* Don't walk into subtrees.  We've already handled them above.  */
983  *walk_subtrees = 0;
984
985  (*lang_hooks.tree_inlining.end_inlining) (fn);
986
987  /* Keep iterating.  */
988  return NULL_TREE;
989}
990
991/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
992   expansions as appropriate.  */
993
994static void
995expand_calls_inline (tp, id)
996     tree *tp;
997     inline_data *id;
998{
999  /* Search through *TP, replacing all calls to inline functions by
1000     appropriate equivalents.  Use walk_tree in no-duplicates mode
1001     to avoid exponential time complexity.  (We can't just use
1002     walk_tree_without_duplicates, because of the special TARGET_EXPR
1003     handling in expand_call_inline.)  The hash table is set up in
1004     optimize_inline_calls.  */
1005  walk_tree (tp, expand_call_inline, id, id->tree_pruner);
1006}
1007
1008/* Expand calls to inline functions in the body of FN.  */
1009
1010void
1011optimize_inline_calls (fn)
1012     tree fn;
1013{
1014  inline_data id;
1015  tree prev_fn;
1016
1017  /* Clear out ID.  */
1018  memset (&id, 0, sizeof (id));
1019
1020  /* Don't allow recursion into FN.  */
1021  VARRAY_TREE_INIT (id.fns, 32, "fns");
1022  VARRAY_PUSH_TREE (id.fns, fn);
1023  /* Or any functions that aren't finished yet.  */
1024  prev_fn = NULL_TREE;
1025  if (current_function_decl)
1026    {
1027      VARRAY_PUSH_TREE (id.fns, current_function_decl);
1028      prev_fn = current_function_decl;
1029    }
1030
1031  prev_fn = ((*lang_hooks.tree_inlining.add_pending_fn_decls)
1032	     (&id.fns, prev_fn));
1033
1034  /* Create the stack of TARGET_EXPRs.  */
1035  VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
1036
1037  /* Create the list of functions this call will inline.  */
1038  VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
1039
1040  /* Keep track of the low-water mark, i.e., the point where the first
1041     real inlining is represented in ID.FNS.  */
1042  id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1043
1044  /* Replace all calls to inline functions with the bodies of those
1045     functions.  */
1046  id.tree_pruner = htab_create (37, htab_hash_pointer,
1047				htab_eq_pointer, NULL);
1048  expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1049
1050  /* Clean up.  */
1051  htab_delete (id.tree_pruner);
1052  VARRAY_FREE (id.fns);
1053  VARRAY_FREE (id.target_exprs);
1054  if (DECL_LANG_SPECIFIC (fn))
1055    {
1056      tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1057
1058      memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1059	      VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1060      DECL_INLINED_FNS (fn) = ifn;
1061    }
1062  VARRAY_FREE (id.inlined_fns);
1063}
1064
1065/* FN is a function that has a complete body, and CLONE is a function
1066   whose body is to be set to a copy of FN, mapping argument
1067   declarations according to the ARG_MAP splay_tree.  */
1068
1069void
1070clone_body (clone, fn, arg_map)
1071     tree clone, fn;
1072     void *arg_map;
1073{
1074  inline_data id;
1075
1076  /* Clone the body, as if we were making an inline call.  But, remap
1077     the parameters in the callee to the parameters of the caller.  If
1078     there's an in-charge parameter, map it to an appropriate
1079     constant.  */
1080  memset (&id, 0, sizeof (id));
1081  VARRAY_TREE_INIT (id.fns, 2, "fns");
1082  VARRAY_PUSH_TREE (id.fns, clone);
1083  VARRAY_PUSH_TREE (id.fns, fn);
1084  id.decl_map = (splay_tree)arg_map;
1085
1086  /* Cloning is treated slightly differently from inlining.  Set
1087     CLONING_P so that it's clear which operation we're performing.  */
1088  id.cloning_p = true;
1089
1090  /* Actually copy the body.  */
1091  TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1092
1093  /* Clean up.  */
1094  VARRAY_FREE (id.fns);
1095}
1096
1097/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
1098   FUNC is called with the DATA and the address of each sub-tree.  If
1099   FUNC returns a non-NULL value, the traversal is aborted, and the
1100   value returned by FUNC is returned.  If HTAB is non-NULL it is used
1101   to record the nodes visited, and to avoid visiting a node more than
1102   once.  */
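
/* A minimal usage sketch (hypothetical function, not defined in this
   file): a callback counting CALL_EXPRs could be written as

	static tree
	count_calls_r (tp, walk_subtrees, data)
	     tree *tp;
	     int *walk_subtrees ATTRIBUTE_UNUSED;
	     void *data;
	{
	  if (TREE_CODE (*tp) == CALL_EXPR)
	    ++*(int *) data;
	  return NULL_TREE;
	}

   and invoked as `walk_tree (&body, count_calls_r, &n_calls, NULL)'.  */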
1103
1104tree
1105walk_tree (tp, func, data, htab_)
1106     tree *tp;
1107     walk_tree_fn func;
1108     void *data;
1109     void *htab_;
1110{
1111  htab_t htab = (htab_t) htab_;
1112  enum tree_code code;
1113  int walk_subtrees;
1114  tree result;
1115
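/* Walk NODE recursively; if that walk finds something, return it from
   walk_tree right away.  */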
1116#define WALK_SUBTREE(NODE)				\
1117  do							\
1118    {							\
1119      result = walk_tree (&(NODE), func, data, htab);	\
1120      if (result)					\
1121	return result;					\
1122    }							\
1123  while (0)
1124
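/* Like WALK_SUBTREE, but loop back to the top of walk_tree instead of
   recursing, so that long chains of nodes do not exhaust the stack.  */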
1125#define WALK_SUBTREE_TAIL(NODE)				\
1126  do							\
1127    {							\
1128       tp = & (NODE);					\
1129       goto tail_recurse;				\
1130    }							\
1131  while (0)
1132
1133 tail_recurse:
1134  /* Skip empty subtrees.  */
1135  if (!*tp)
1136    return NULL_TREE;
1137
1138  if (htab)
1139    {
1140      void **slot;
1141
1142      /* Don't walk the same tree twice, if the user has requested
1143         that we avoid doing so.  */
1144      if (htab_find (htab, *tp))
1145	return NULL_TREE;
1146      /* If we haven't already seen this node, add it to the table.  */
1147      slot = htab_find_slot (htab, *tp, INSERT);
1148      *slot = *tp;
1149    }
1150
1151  /* Call the function.  */
1152  walk_subtrees = 1;
1153  result = (*func) (tp, &walk_subtrees, data);
1154
1155  /* If we found something, return it.  */
1156  if (result)
1157    return result;
1158
1159  code = TREE_CODE (*tp);
1160
1161  /* Even if we didn't, FUNC may have decided that there was nothing
1162     interesting below this point in the tree.  */
1163  if (!walk_subtrees)
1164    {
1165      if (statement_code_p (code) || code == TREE_LIST
1166	  || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1167	/* But we still need to check our siblings.  */
1168	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1169      else
1170	return NULL_TREE;
1171    }
1172
1173  /* Handle common cases up front.  */
1174  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1175      || TREE_CODE_CLASS (code) == 'r'
1176      || TREE_CODE_CLASS (code) == 's')
1177    {
1178      int i, len;
1179
1180      /* Set lineno here so we get the right instantiation context
1181	 if we call instantiate_decl from inlinable_function_p.  */
1182      if (statement_code_p (code) && !STMT_LINENO_FOR_FN_P (*tp))
1183	lineno = STMT_LINENO (*tp);
1184
1185      /* Walk over all the sub-trees of this operand.  */
1186      len = first_rtl_op (code);
1187      /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
1188	 But, we only want to walk once.  */
1189      if (code == TARGET_EXPR
1190	  && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
1191	--len;
1192      /* Go through the subtrees.  We need to do this in forward order so
1193         that the scope of a FOR_EXPR is handled properly.  */
1194      for (i = 0; i < len; ++i)
1195	WALK_SUBTREE (TREE_OPERAND (*tp, i));
1196
1197      /* For statements, we also walk the chain so that we cover the
1198	 entire statement tree.  */
1199      if (statement_code_p (code))
1200	{
1201	  if (code == DECL_STMT
1202	      && DECL_STMT_DECL (*tp)
1203	      && DECL_P (DECL_STMT_DECL (*tp)))
1204	    {
1205	      /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
1206		 into declarations that are just mentioned, rather than
1207		 declared; they don't really belong to this part of the tree.
1208		 And, we can see cycles: the initializer for a declaration can
1209		 refer to the declaration itself.  */
1210	      WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
1211	      WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
1212	      WALK_SUBTREE (DECL_SIZE_UNIT (DECL_STMT_DECL (*tp)));
1213	    }
1214
1215	  /* This can be tail-recursion optimized if we write it this way.  */
1216	  WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1217	}
1218
1219      /* We didn't find what we were looking for.  */
1220      return NULL_TREE;
1221    }
1222  else if (TREE_CODE_CLASS (code) == 'd')
1223    {
1224      WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1225    }
1226
1227  result = (*lang_hooks.tree_inlining.walk_subtrees) (tp, &walk_subtrees, func,
1228						      data, htab);
1229  if (result || ! walk_subtrees)
1230    return result;
1231
1232  /* Not one of the easy cases.  We must explicitly go through the
1233     children.  */
1234  switch (code)
1235    {
1236    case ERROR_MARK:
1237    case IDENTIFIER_NODE:
1238    case INTEGER_CST:
1239    case REAL_CST:
1240    case VECTOR_CST:
1241    case STRING_CST:
1242    case REAL_TYPE:
1243    case COMPLEX_TYPE:
1244    case VECTOR_TYPE:
1245    case VOID_TYPE:
1246    case BOOLEAN_TYPE:
1247    case UNION_TYPE:
1248    case ENUMERAL_TYPE:
1249    case BLOCK:
1250    case RECORD_TYPE:
1251      /* None of these have subtrees other than those already walked
1252         above.  */
1253      break;
1254
1255    case POINTER_TYPE:
1256    case REFERENCE_TYPE:
1257      WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1258      break;
1259
1260    case TREE_LIST:
1261      WALK_SUBTREE (TREE_VALUE (*tp));
1262      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1263      break;
1264
1265    case TREE_VEC:
1266      {
1267	int len = TREE_VEC_LENGTH (*tp);
1268
1269	if (len == 0)
1270	  break;
1271
1272	/* Walk all elements but the first.  */
1273	while (--len)
1274	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
1275
1276	/* Now walk the first one as a tail call.  */
1277	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
1278      }
1279
1280    case COMPLEX_CST:
1281      WALK_SUBTREE (TREE_REALPART (*tp));
1282      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
1283
1284    case CONSTRUCTOR:
1285      WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
1286
1287    case METHOD_TYPE:
1288      WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
1289      /* Fall through.  */
1290
1291    case FUNCTION_TYPE:
1292      WALK_SUBTREE (TREE_TYPE (*tp));
1293      {
1294	tree arg = TYPE_ARG_TYPES (*tp);
1295
1296	/* We never want to walk into default arguments.  */
1297	for (; arg; arg = TREE_CHAIN (arg))
1298	  WALK_SUBTREE (TREE_VALUE (arg));
1299      }
1300      break;
1301
1302    case ARRAY_TYPE:
1303      WALK_SUBTREE (TREE_TYPE (*tp));
1304      WALK_SUBTREE_TAIL (TYPE_DOMAIN (*tp));
1305
1306    case INTEGER_TYPE:
1307      WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
1308      WALK_SUBTREE_TAIL (TYPE_MAX_VALUE (*tp));
1309
1310    case OFFSET_TYPE:
1311      WALK_SUBTREE (TREE_TYPE (*tp));
1312      WALK_SUBTREE_TAIL (TYPE_OFFSET_BASETYPE (*tp));
1313
1314    default:
1315      abort ();
1316    }
1317
1318  /* We didn't find what we were looking for.  */
1319  return NULL_TREE;
1320
1321#undef WALK_SUBTREE
1322}
1323
1324/* Like walk_tree, but does not walk duplicate nodes more than
1325   once.  */
1326
1327tree
1328walk_tree_without_duplicates (tp, func, data)
1329     tree *tp;
1330     walk_tree_fn func;
1331     void *data;
1332{
1333  tree result;
1334  htab_t htab;
1335
1336  htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1337  result = walk_tree (tp, func, data, htab);
1338  htab_delete (htab);
1339  return result;
1340}
1341
1342/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */
1343
1344tree
1345copy_tree_r (tp, walk_subtrees, data)
1346     tree *tp;
1347     int *walk_subtrees;
1348     void *data ATTRIBUTE_UNUSED;
1349{
1350  enum tree_code code = TREE_CODE (*tp);
1351
1352  /* We make copies of most nodes.  */
1353  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1354      || TREE_CODE_CLASS (code) == 'r'
1355      || TREE_CODE_CLASS (code) == 'c'
1356      || TREE_CODE_CLASS (code) == 's'
1357      || code == TREE_LIST
1358      || code == TREE_VEC
1359      || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1360    {
1361      /* Because the chain gets clobbered when we make a copy, we save it
1362	 here.  */
1363      tree chain = TREE_CHAIN (*tp);
1364
1365      /* Copy the node.  */
1366      *tp = copy_node (*tp);
1367
1368      /* Now, restore the chain, if appropriate.  That will cause
1369	 walk_tree to walk into the chain as well.  */
1370      if (code == PARM_DECL || code == TREE_LIST
1371	  || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp)
1372	  || statement_code_p (code))
1373	TREE_CHAIN (*tp) = chain;
1374
1375      /* For now, we don't update BLOCKs when we make copies.  So, we
1376	 have to nullify all scope-statements.  */
1377      if (TREE_CODE (*tp) == SCOPE_STMT)
1378	SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
1379    }
1380  else if (TREE_CODE_CLASS (code) == 't')
1381    /* There's no need to copy types, or anything beneath them.  */
1382    *walk_subtrees = 0;
1383
1384  return NULL_TREE;
1385}
1386
1387/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
1388   information indicating to what new SAVE_EXPR this one should be
1389   mapped, use that one.  Otherwise, create a new node and enter it in
1390   ST.  FN is the function into which the copy will be placed.  */
1391
1392void
1393remap_save_expr (tp, st_, fn, walk_subtrees)
1394     tree *tp;
1395     void *st_;
1396     tree fn;
1397     int *walk_subtrees;
1398{
1399  splay_tree st = (splay_tree) st_;
1400  splay_tree_node n;
1401
1402  /* See if we already encountered this SAVE_EXPR.  */
1403  n = splay_tree_lookup (st, (splay_tree_key) *tp);
1404
1405  /* If we didn't already remap this SAVE_EXPR, do so now.  */
1406  if (!n)
1407    {
1408      tree t = copy_node (*tp);
1409
1410      /* The SAVE_EXPR is now part of the function into which we
1411	 are inlining this body.  */
1412      SAVE_EXPR_CONTEXT (t) = fn;
1413      /* And we haven't evaluated it yet.  */
1414      SAVE_EXPR_RTL (t) = NULL_RTX;
1415      /* Remember this SAVE_EXPR.  */
1416      n = splay_tree_insert (st,
1417			     (splay_tree_key) *tp,
1418			     (splay_tree_value) t);
1419      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
1420      splay_tree_insert (st, (splay_tree_key) t,
1421			 (splay_tree_value) error_mark_node);
1422    }
1423  else
1424    /* We've already walked into this SAVE_EXPR, so we needn't do it
1425       again.  */
1426    *walk_subtrees = 0;
1427
1428  /* Replace this SAVE_EXPR with the copy.  */
1429  *tp = (tree) n->value;
1430}
1431