/* integrate.c revision 119256 */
/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round VALUE up to the next multiple of ALIGN, which must be a power
   of two.  */
#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
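/* For example, CEIL_ROUND (13, 8) is (13 + 7) & ~7, i.e. 16.  */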

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
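/* Under the default formula, a two-argument function may have up to
   8 * (8 + 2) == 80 insns, but only 1 + (3 * 2) / 2 == 4 insns when
   optimizing for size.  */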


/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void setup_initial_hard_reg_value_integration PARAMS ((struct function *, struct inline_remap *));

static rtvec initialize_for_inline	PARAMS ((tree));
static void note_modified_parmregs	PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls	PARAMS ((tree, struct inline_remap *,
						 rtvec));
static tree integrate_decl_tree		PARAMS ((tree,
						 struct inline_remap *));
static void subst_constants		PARAMS ((rtx *, rtx,
						 struct inline_remap *, int));
static void set_block_origin_self	PARAMS ((tree));
static void set_block_abstract_flags	PARAMS ((tree, int));
static void process_reg_param		PARAMS ((struct inline_remap *, rtx,
						 rtx));
void set_decl_abstract_flags		PARAMS ((tree, int));
static void mark_stores                 PARAMS ((rtx, rtx, void *));
static void save_parm_insns		PARAMS ((rtx, rtx));
static void copy_insn_list              PARAMS ((rtx, struct inline_remap *,
						 rtx));
static void copy_insn_notes		PARAMS ((rtx, struct inline_remap *,
						 int));
static int compare_blocks               PARAMS ((const PTR, const PTR));
static int find_block                   PARAMS ((const PTR, const PTR));

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the function being
   inlined.  */
static struct function *inlining = 0;

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
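
/* For illustration, remapping a label from the inlined function is a
   single lookup; e.g. copy_rtx_and_substitute obtains the replacement
   for a CODE_LABEL roughly as

     rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (orig));

   allocating the replacement lazily on first use.  */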

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (fndecl)
     tree fndecl;
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return (*targetm.function_attribute_inlinable_p) (fndecl);
	}
    }

  return true;
}

/* Return NULL if it is safe and reasonable to integrate the current
   function (whose FUNCTION_DECL is FNDECL) into other functions.
   Otherwise return a warning msgid with a single %s for the
   function's name.  */

const char *
function_cannot_inline_p (fndecl)
     tree fndecl;
{
  rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline, increase the maximum size to
     MAX_INLINE_INSNS_RTL (--param max-inline-insns-rtl=<n>).  For
     regular functions use the limit given by INTEGRATE_THRESHOLD.
     Note that the RTL inliner is not used by the languages that use
     the tree inliner (C, C++).  */

  int max_insns = (DECL_INLINE (fndecl))
		   ? (MAX_INLINE_INSNS_RTL
		      + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		   : INTEGRATE_THRESHOLD (fndecl);

  int ninsns = 0;
  tree parms;

  if (DECL_UNINLINABLE (fndecl))
    return N_("function cannot be inline");

  /* No inlines with varargs.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_calls_eh_return)
    return N_("function uses __builtin_eh_return");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behavior.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* We can't inline functions that return a PARALLEL rtx.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      rtx result = DECL_RTL (DECL_RESULT (fndecl));
      if (GET_CODE (result) == PARALLEL)
	return N_("inline functions not supported for this return value type");
    }

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (!function_attribute_inlinable_p (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
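
/* Callers use the returned msgid along these lines (a sketch, not code
   from this file):

     const char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       warning_with_decl (fndecl, lose);  */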

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);
      /* If we have (mem (addressof (mem ...))), use the inner MEM, since
	 otherwise the copy_rtx call below will not unshare the MEM because
	 it shares the ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type;
      int invisiref = 0;

      /* See if the frontend wants to pass this by invisible reference.  */
      if (TREE_CODE (decl) == PARM_DECL
	  && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
	  && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
	  && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
	{
	  invisiref = 1;
	  type = DECL_ARG_TYPE (decl);
	}
      else
	type = TREE_TYPE (decl);

      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      if (!invisiref)
	{
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
	  TREE_READONLY (copy) = TREE_READONLY (decl);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
	}
      else
	{
	  TREE_ADDRESSABLE (copy) = 0;
	  TREE_READONLY (copy) = 1;
	  TREE_THIS_VOLATILE (copy) = 0;
	}
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      (*lang_hooks.dup_lang_specific_decl) (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */
  if (! flag_no_inline)
    parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  if (! flag_no_inline)
    argvec = initialize_for_inline (fndecl);
  else
    argvec = NULL;
  /* Delete the basic block notes created by an early run of
     find_basic_blocks.  The notes would otherwise later be used by
     find_basic_blocks to reuse the memory for basic_block structures
     on an already freed obstack.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
      delete_related_insns (insn);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  if (! flag_no_inline)
    {
      /* Get the insn which signals the end of parameter setup code.  */
      first_nonparm_insn = get_first_nonparm_insn ();

      /* Now just scan the chain of insns to see what happens to our
	 PARM_DECLs.  If a PARM_DECL is used but never modified, we
	 can substitute its rtl directly when expanding inline (and
	 perform constant folding when its incoming value is
	 constant).  Otherwise, we have to copy its value into a new
	 register and track the new register's life.  */
      in_nonparm_insns = 0;
      save_parm_insns (insn, first_nonparm_insn);

      cfun->inl_max_label_num = max_label_num ();
      cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
      cfun->original_arg_vector = argvec;
    }
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  if (! flag_no_inline)
    free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant). Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
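
/* For illustration, FIXED_BASE_PLUS_P matches an rtx such as
   (plus:SI (reg:SI virtual-stack-vars) (const_int 8)): a constant
   offset from one of the virtual base registers, but not a sum whose
   base is an ordinary pseudo or hard register.  */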

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  We cannot just subtract the
   two pointers, because the difference may not fit in an int.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) BLOCK_ABSTRACT_ORIGIN (b1);
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);
  char *p1 = (char *) b1;
  char *p2 = (char *) BLOCK_ABSTRACT_ORIGIN (b2);

  if (p1 == p2)
    return 0;
  return p1 < p2 ? -1 : 1;
}
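
/* The two comparators work as a pair: expand_inline_function qsorts
   map->block_map with compare_blocks, after which the remapped
   equivalent of an original block can be found with a bsearch along
   these lines (a sketch, not code from this file):

     tree *remapped = (tree *) bsearch (original_block,
					&VARRAY_TREE (map->block_map, 0),
					map->block_map->elements_used,
					sizeof (tree), find_block);  */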

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
  rtvec arg_vector = inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;
  int eh_region_offset;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (size_t) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (arg == error_mark_node
	  || mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (size_t) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (formal));
	      enum machine_mode pmode = TYPE_MODE (TREE_TYPE (formal));

	      pmode = promote_mode (TREE_TYPE (formal), pmode,
				    &unsignedp, 0);

	      if (GET_MODE (loc) != pmode)
		abort ();

	      /* The mode of LOC and ARG can differ if LOC was a variable
		 that had its mode promoted via PROMOTE_MODE.  */
	      arg_vals[i] = convert_modes (pmode,
					   TYPE_MODE (TREE_TYPE (arg)),
					   expand_expr (arg, NULL_RTX, mode,
							EXPAND_SUM),
					   unsignedp);
	    }
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xcalloc (1, sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;
  map->local_return_label = NULL_RTX;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;
  map->compare_src = NULL_RTX;
  map->compare_mode = VOIDmode;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
	 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo. Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else if (GET_CODE (loc) == CONCAT)
    {
      enum machine_mode departing_mode = TYPE_MODE (type);
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));

      if (departing_mode != arriving_mode)
	abort ();
      if (GET_CODE (XEXP (loc, 0)) != REG
	  || GET_CODE (XEXP (loc, 1)) != REG)
	abort ();

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	target = gen_reg_rtx (departing_mode);

      if (GET_CODE (target) != CONCAT)
	abort ();

      map->reg_map[REGNO (XEXP (loc, 0))] = XEXP (target, 0);
      map->reg_map[REGNO (XEXP (loc, 1))] = XEXP (target, 1);
    }
  else
    abort ();

  /* Remap the exception handler data pointer from one to the other.  */
  temp = get_exception_pointer (inl_f);
  if (temp)
    map->reg_map[REGNO (temp)] = get_exception_pointer (cfun);

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	 (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Map pseudos used for initial hard reg values.  */
  setup_initial_hard_reg_value_integration (inl_f, map);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Duplicate the EH regions.  This will create an offset from the
     region numbers in the function we're inlining to the region
     numbers in the calling function.  This must wait until after
     copy_insn_list, as we need the insn map to be complete.  */
  eh_region_offset = duplicate_eh_regions (inl_f, map);

  /* Now copy the REG_NOTES for those insns.  */
  copy_insn_notes (insns, map, eh_region_offset);

  /* If the insn sequence required one, emit the return label.  */
  if (map->local_return_label)
    emit_label (map->local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode, since
       the superblocks have not been created yet.  */
    (*lang_hooks.decls.insert_block) (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function. This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is nonzero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  int i;
  rtx insn;
  rtx temp;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtx static_chain_mem = 0;

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* Look for the address of the static chain slot. The
             rtx_equal_p comparisons against the
             static_chain_incoming_rtx below may fail if the static
             chain is in memory and the address specified is not
             "legitimate".  This happens on Xtensa where the static
             chain is at a negative offset from argp and where only
             positive offsets are legitimate.  When the RTL is
             generated, the address is "legitimized" by copying it
             into a register, causing the rtx_equal_p comparisons to
             fail.  This workaround looks for code that sets a
             register to the address of the static chain.  Subsequent
             memory references via that register can then be
             identified as static chain references.  We assume that
             the register is only assigned once, and that the static
             chain address is only live in one register at a time.  */

	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (static_chain_incoming_rtx) == MEM
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_SRC (set),
				   XEXP (static_chain_incoming_rtx, 0)))
	    {
	      static_chain_mem =
		  gen_rtx_MEM (GET_MODE (static_chain_incoming_rtx),
			       SET_DEST (set));

	      /* Emit the instruction in case it is used for something
		 other than setting the static chain; if it's not used,
		 it can always be removed as dead code.  */
	      copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_DEST (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_DEST (set), static_chain_mem))))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && (rtx_equal_p (SET_SRC (set),
				    static_chain_incoming_rtx)
		       || (static_chain_mem
			   && rtx_equal_p (SET_SRC (set), static_chain_mem))))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      if (GET_CODE (static_chain_incoming_rtx) != MEM)
		static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

1501#ifdef HAVE_cc0
1502	  /* If this insn is setting CC0, it may need to look at
1503	     the insn that uses CC0 to see what type of insn it is.
1504	     In that case, the call to recog via validate_change will
1505	     fail.  So don't substitute constants here.  Instead,
1506	     do it when we emit the following insn.
1507
1508	     For example, see the pyr.md file.  That machine has signed and
1509	     unsigned compares.  The compare patterns must check the
1510	     following branch insn to see what kind of compare to
1511	     emit.
1512
1513	     If the previous insn set CC0, substitute constants on it as
1514	     well.  */
1515	  if (sets_cc0_p (PATTERN (copy)) != 0)
1516	    cc0_insn = copy;
1517	  else
1518	    {
1519	      if (cc0_insn)
1520		try_constants (cc0_insn, map);
1521	      cc0_insn = 0;
1522	      try_constants (copy, map);
1523	    }
1524#else
1525	  try_constants (copy, map);
1526#endif
1527	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
1528	  break;
1529
1530	case JUMP_INSN:
1531	  if (map->integrating && returnjump_p (insn))
1532	    {
1533	      if (map->local_return_label == 0)
1534		map->local_return_label = gen_label_rtx ();
1535	      pattern = gen_jump (map->local_return_label);
1536	    }
1537	  else
1538	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1539
1540	  copy = emit_jump_insn (pattern);
1541
1542#ifdef HAVE_cc0
1543	  if (cc0_insn)
1544	    try_constants (cc0_insn, map);
1545	  cc0_insn = 0;
1546#endif
1547	  try_constants (copy, map);
1548	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
1549
1550	  /* If this used to be a conditional jump insn whose branch
1551	     direction is now known, we must do something special.  */
1552	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
1553	    {
1554#ifdef HAVE_cc0
1555	      /* If the previous insn set cc0 for us, delete it.  */
1556	      if (only_sets_cc0_p (PREV_INSN (copy)))
1557		delete_related_insns (PREV_INSN (copy));
1558#endif
1559
1560	      /* If this is now a no-op, delete it.  */
1561	      if (map->last_pc_value == pc_rtx)
1562		{
1563		  delete_related_insns (copy);
1564		  copy = 0;
1565		}
1566	      else
1567		/* Otherwise, this is an unconditional jump so we must put a
1568		   BARRIER after it.  We could do some dead code elimination
1569		   here, but jump.c will do it just as well.  */
1570		emit_barrier ();
1571	    }
1572	  break;
1573
1574	case CALL_INSN:
1575	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
1576	     three attached sequences: normal call, sibling call and tail
1577	     recursion.  */
1578	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1579	    {
1580	      rtx sequence[3];
1581	      rtx tail_label;
1582
1583	      for (i = 0; i < 3; i++)
1584		{
1585		  rtx seq;
1586
1587		  sequence[i] = NULL_RTX;
1588		  seq = XEXP (PATTERN (insn), i);
1589		  if (seq)
1590		    {
1591		      start_sequence ();
1592		      copy_insn_list (seq, map, static_chain_value);
1593		      sequence[i] = get_insns ();
1594		      end_sequence ();
1595		    }
1596		}
1597
1598	      /* Find the new tail recursion label.
1599	         It will already be substituted into sequence[2].  */
1600	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
1601						    map, 0);
1602
1603	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
1604							       sequence[0],
1605							       sequence[1],
1606							       sequence[2],
1607							       tail_label));
1608	      break;
1609	    }
1610
1611	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1612	  copy = emit_call_insn (pattern);
1613
1614	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
1615	  CONST_OR_PURE_CALL_P (copy) = CONST_OR_PURE_CALL_P (insn);
1616	  INSN_SCOPE (copy) = INSN_SCOPE (insn);
1617
1618	  /* Because the USAGE information potentially contains objects other
1619	     than hard registers, we need to copy it.  */
1620
1621	  CALL_INSN_FUNCTION_USAGE (copy)
1622	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1623				       map, 0);
1624
1625#ifdef HAVE_cc0
1626	  if (cc0_insn)
1627	    try_constants (cc0_insn, map);
1628	  cc0_insn = 0;
1629#endif
1630	  try_constants (copy, map);
1631
1632	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
1633	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1634	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1635	  break;
1636
1637	case CODE_LABEL:
1638	  copy = emit_label (get_label_from_map (map,
1639						 CODE_LABEL_NUMBER (insn)));
1640	  LABEL_NAME (copy) = LABEL_NAME (insn);
1641	  map->const_age++;
1642	  break;
1643
1644	case BARRIER:
1645	  copy = emit_barrier ();
1646	  break;
1647
1648	case NOTE:
1649	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)
1650	    {
1651	      copy = emit_label (get_label_from_map (map,
1652						    CODE_LABEL_NUMBER (insn)));
1653	      LABEL_NAME (copy) = NOTE_SOURCE_FILE (insn);
1654	      map->const_age++;
1655	      break;
1656	    }
1657
1658	  /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
1659	     discarded because it is important to have only one of
1660	     each in the current function.
1661
1662	     NOTE_INSN_DELETED notes aren't useful.  */
1663
1664	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1665	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1666	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1667	    {
1668	      copy = emit_note (NOTE_SOURCE_FILE (insn),
1669				NOTE_LINE_NUMBER (insn));
1670	      if (copy
1671		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1672		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1673		  && NOTE_BLOCK (insn))
1674		{
1675		  tree *mapped_block_p;
1676
1677		  mapped_block_p
1678		    = (tree *) bsearch (NOTE_BLOCK (insn),
1679					&VARRAY_TREE (map->block_map, 0),
1680					map->block_map->elements_used,
1681					sizeof (tree),
1682					find_block);
1683
1684		  if (!mapped_block_p)
1685		    abort ();
1686		  else
1687		    NOTE_BLOCK (copy) = *mapped_block_p;
1688		}
1689	      else if (copy
1690		       && NOTE_LINE_NUMBER (copy) == NOTE_INSN_EXPECTED_VALUE)
1691		NOTE_EXPECTED_VALUE (copy)
1692		  = copy_rtx_and_substitute (NOTE_EXPECTED_VALUE (insn),
1693					     map, 0);
1694	    }
1695	  else
1696	    copy = 0;
1697	  break;
1698
1699	default:
1700	  abort ();
1701	}
1702
1703      if (copy)
1704	RTX_INTEGRATED_P (copy) = 1;
1705
1706      map->insn_map[INSN_UID (insn)] = copy;
1707    }
1708}
1709
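/* A minimal sketch (not GCC code; names and types are hypothetical) of
   the two-pass technique copy_insn_list relies on: first copy every
   node and record the copy in a table indexed by the original's UID,
   then walk the originals again and patch cross-references through
   that table, much as copy_insn_notes does below.  Assumes <stdlib.h>
   for malloc.  */
#if 0
struct node { int uid; struct node *target; struct node *next; };

static void
copy_with_uid_map (struct node *head, struct node **copies)
{
  struct node *n;

  /* Pass 1: copy each node and record it under its UID.  */
  for (n = head; n; n = n->next)
    {
      struct node *c = (struct node *) malloc (sizeof *c);
      *c = *n;
      copies[n->uid] = c;
    }

  /* Pass 2: redirect links so they point at the copies.  */
  for (n = head; n; n = n->next)
    {
      copies[n->uid]->next = n->next ? copies[n->next->uid] : 0;
      if (n->target)
	copies[n->uid]->target = copies[n->target->uid];
    }
}
#endif
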
1710/* Copy the REG_NOTES.  Increment const_age, so that only constants
1711   from parameters can be substituted in.  These are the only ones
1712   that are valid across the entire function.  */
1713
1714static void
1715copy_insn_notes (insns, map, eh_region_offset)
1716     rtx insns;
1717     struct inline_remap *map;
1718     int eh_region_offset;
1719{
1720  rtx insn, new_insn;
1721
1722  map->const_age++;
1723  for (insn = insns; insn; insn = NEXT_INSN (insn))
1724    {
1725      if (! INSN_P (insn))
1726	continue;
1727
1728      new_insn = map->insn_map[INSN_UID (insn)];
1729      if (! new_insn)
1730	continue;
1731
1732      if (REG_NOTES (insn))
1733        {
1734	  rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1735
1736	  /* We must also do subst_constants, in case one of our parameters
1737	     has const type and constant value.  */
1738	  subst_constants (&note, NULL_RTX, map, 0);
1739	  apply_change_group ();
1740	  REG_NOTES (new_insn) = note;
1741
1742	  /* Delete any REG_LABEL notes from the chain.  Remap any
1743             REG_EH_REGION notes.  */
1744	  for (; note; note = next)
1745	    {
1746	      next = XEXP (note, 1);
1747	      if (REG_NOTE_KIND (note) == REG_LABEL)
1748	        remove_note (new_insn, note);
1749	      else if (REG_NOTE_KIND (note) == REG_EH_REGION
1750		       && INTVAL (XEXP (note, 0)) > 0)
1751	        XEXP (note, 0) = GEN_INT (INTVAL (XEXP (note, 0))
1752					  + eh_region_offset);
1753	    }
1754        }
1755
1756      if (GET_CODE (insn) == CALL_INSN
1757	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1758	{
1759	  int i;
1760	  for (i = 0; i < 3; i++)
1761	    copy_insn_notes (XEXP (PATTERN (insn), i), map, eh_region_offset);
1762	}
1763
1764      if (GET_CODE (insn) == JUMP_INSN
1765	  && GET_CODE (PATTERN (insn)) == RESX)
1766	XINT (PATTERN (new_insn), 0) += eh_region_offset;
1767    }
1768}
1769
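/* A sketch (hypothetical types, not GCC code) of the note fixups done
   above: REG_LABEL-style notes are unlinked from the copied chain, and
   positive EH-region numbers are biased by the inlined function's
   region offset.  */
#if 0
struct note { int kind; int value; struct note *next; };
enum note_kind { NOTE_KIND_LABEL, NOTE_KIND_EH_REGION };

static struct note *
remap_notes (struct note *chain, int eh_region_offset)
{
  struct note **link = &chain;

  while (*link)
    {
      struct note *n = *link;

      if (n->kind == NOTE_KIND_LABEL)
	*link = n->next;			/* Unlink label notes.  */
      else
	{
	  if (n->kind == NOTE_KIND_EH_REGION && n->value > 0)
	    n->value += eh_region_offset;	/* Bias the region number.  */
	  link = &n->next;
	}
    }
  return chain;
}
#endif
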
1770/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1771   push all of those decls and give each one the corresponding home.  */
1772
1773static void
1774integrate_parm_decls (args, map, arg_vector)
1775     tree args;
1776     struct inline_remap *map;
1777     rtvec arg_vector;
1778{
1779  tree tail;
1780  int i;
1781
1782  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1783    {
1784      tree decl = copy_decl_for_inlining (tail, map->fndecl,
1785					  current_function_decl);
1786      rtx new_decl_rtl
1787	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1788
1789      /* We really should be setting DECL_INCOMING_RTL to something reasonable
1790	 here, but that's going to require some more work.  */
1791      /* DECL_INCOMING_RTL (decl) = ?; */
1792      /* Fully instantiate the address with the equivalent form so that the
1793	 debugging information contains the actual register, instead of the
1794	 virtual register.   Do this by not passing an insn to
1795	 subst_constants.  */
1796      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1797      apply_change_group ();
1798      SET_DECL_RTL (decl, new_decl_rtl);
1799    }
1800}
1801
1802/* Given a BLOCK node LET, push decls and levels so as to construct in the
1803   current function a tree of contexts isomorphic to the one that is given.
1804
1805   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1806   registers used in the DECL_RTL field should be remapped.  If it is zero,
1807   no mapping is necessary.  */
1808
1809static tree
1810integrate_decl_tree (let, map)
1811     tree let;
1812     struct inline_remap *map;
1813{
1814  tree t;
1815  tree new_block;
1816  tree *next;
1817
1818  new_block = make_node (BLOCK);
1819  VARRAY_PUSH_TREE (map->block_map, new_block);
1820  next = &BLOCK_VARS (new_block);
1821
1822  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1823    {
1824      tree d;
1825
1826      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1827
1828      if (DECL_RTL_SET_P (t))
1829	{
1830	  rtx r;
1831
1832	  SET_DECL_RTL (d, copy_rtx_and_substitute (DECL_RTL (t), map, 1));
1833
1834	  /* Fully instantiate the address with the equivalent form so that the
1835	     debugging information contains the actual register, instead of the
1836	     virtual register.   Do this by not passing an insn to
1837	     subst_constants.  */
1838	  r = DECL_RTL (d);
1839	  subst_constants (&r, NULL_RTX, map, 1);
1840	  SET_DECL_RTL (d, r);
1841
1842	  if (GET_CODE (r) == REG)
1843	    REGNO_DECL (REGNO (r)) = d;
1844	  else if (GET_CODE (r) == CONCAT)
1845	    {
1846	      REGNO_DECL (REGNO (XEXP (r, 0))) = d;
1847	      REGNO_DECL (REGNO (XEXP (r, 1))) = d;
1848	    }
1849
1850	  apply_change_group ();
1851	}
1852
1853      /* Add this declaration to the list of variables in the new
1854	 block.  */
1855      *next = d;
1856      next = &TREE_CHAIN (d);
1857    }
1858
1859  next = &BLOCK_SUBBLOCKS (new_block);
1860  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1861    {
1862      *next = integrate_decl_tree (t, map);
1863      BLOCK_SUPERCONTEXT (*next) = new_block;
1864      next = &BLOCK_CHAIN (*next);
1865    }
1866
1867  TREE_USED (new_block) = TREE_USED (let);
1868  BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1869
1870  return new_block;
1871}
1872
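/* A condensed sketch (hypothetical types, not GCC code) of the shape of
   integrate_decl_tree above: recursively copy a block tree, chaining
   the copied children through a pointer-to-pointer so the list keeps
   its order, and pointing each new child back at its new parent.
   Assumes <stdlib.h> for calloc.  */
#if 0
struct blk { struct blk *subblocks; struct blk *chain; struct blk *super; };

static struct blk *
copy_blk_tree (struct blk *let)
{
  struct blk *nb = (struct blk *) calloc (1, sizeof *nb);
  struct blk *t, **next = &nb->subblocks;

  for (t = let->subblocks; t; t = t->chain)
    {
      *next = copy_blk_tree (t);	/* Copy the child subtree.  */
      (*next)->super = nb;		/* New child points at new parent.  */
      next = &(*next)->chain;		/* Append without reversing.  */
    }
  return nb;
}
#endif
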
1873/* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1874   except for those few rtx codes that are sharable.
1875
1876   We always return an rtx that is similar to that incoming rtx, with the
1877   exception of possibly changing a REG to a SUBREG or vice versa.  No
1878   rtl is ever emitted.
1879
1880   If FOR_LHS is nonzero, it means we are processing something that will
1881   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
1882   inlining since we need to be conservative in how it is set for
1883   such cases.
1884
1885   Handle constants that need to be placed in the constant pool by
1886   calling `force_const_mem'.  */
1887
1888rtx
1889copy_rtx_and_substitute (orig, map, for_lhs)
1890     rtx orig;
1891     struct inline_remap *map;
1892     int for_lhs;
1893{
1894  rtx copy, temp;
1895  int i, j;
1896  RTX_CODE code;
1897  enum machine_mode mode;
1898  const char *format_ptr;
1899  int regno;
1900
1901  if (orig == 0)
1902    return 0;
1903
1904  code = GET_CODE (orig);
1905  mode = GET_MODE (orig);
1906
1907  switch (code)
1908    {
1909    case REG:
1910      /* If the stack pointer register shows up, it must be part of
1911	 stack-adjustments (*not* because we eliminated the frame pointer!).
1912	 Small hard registers are returned as-is.  Pseudo-registers
1913	 go through their `reg_map'.  */
1914      regno = REGNO (orig);
1915      if (regno <= LAST_VIRTUAL_REGISTER
1916	  || (map->integrating
1917	      && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1918	{
1919	  /* Some hard registers are also mapped,
1920	     but others are not translated.  */
1921	  if (map->reg_map[regno] != 0)
1922	    return map->reg_map[regno];
1923
1924	  /* If this is the virtual frame pointer, make space in current
1925	     function's stack frame for the stack frame of the inline function.
1926
1927	     Copy the address of this area into a pseudo.  Map
1928	     virtual_stack_vars_rtx to this pseudo and set up a constant
1929	     equivalence for it to be the address.  This will substitute the
1930	     address into insns where it can be substituted and use the new
1931	     pseudo where it can't.  */
1932	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
1933	    {
1934	      rtx loc, seq;
1935	      int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1936#ifdef FRAME_GROWS_DOWNWARD
1937	      int alignment
1938		= (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
1939		   / BITS_PER_UNIT);
1940
1941	      /* In this case, virtual_stack_vars_rtx points to one byte
1942		 higher than the top of the frame area.  So make sure we
1943		 allocate a big enough chunk to keep the frame pointer
1944		 aligned like a real one.  */
1945	      if (alignment)
1946		size = CEIL_ROUND (size, alignment);
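	      /* For example (illustrative numbers only): a 37-byte frame
		 with a 16-byte alignment requirement is padded here to
		 48 bytes.  */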
1947#endif
1948	      start_sequence ();
1949	      loc = assign_stack_temp (BLKmode, size, 1);
1950	      loc = XEXP (loc, 0);
1951#ifdef FRAME_GROWS_DOWNWARD
1952	      /* In this case, virtual_stack_vars_rtx points to one byte
1953		 higher than the top of the frame area.  So compute the offset
1954		 to one byte higher than our substitute frame.  */
1955	      loc = plus_constant (loc, size);
1956#endif
1957	      map->reg_map[regno] = temp
1958		= force_reg (Pmode, force_operand (loc, NULL_RTX));
1959
1960#ifdef STACK_BOUNDARY
1961	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1962#endif
1963
1964	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1965
1966	      seq = get_insns ();
1967	      end_sequence ();
1968	      emit_insn_after (seq, map->insns_at_start);
1969	      return temp;
1970	    }
1971	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1972		   || (map->integrating
1973		       && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1974			   == orig)))
1975	    {
1976	      /* Do the same for a block to contain any arguments referenced
1977		 in memory.  */
1978	      rtx loc, seq;
1979	      int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1980
1981	      start_sequence ();
1982	      loc = assign_stack_temp (BLKmode, size, 1);
1983	      loc = XEXP (loc, 0);
1984	      /* When arguments grow downward, the virtual incoming
1985		 args pointer points to the top of the argument block,
1986		 so the remapped location better do the same.  */
1987#ifdef ARGS_GROW_DOWNWARD
1988	      loc = plus_constant (loc, size);
1989#endif
1990	      map->reg_map[regno] = temp
1991		= force_reg (Pmode, force_operand (loc, NULL_RTX));
1992
1993#ifdef STACK_BOUNDARY
1994	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
1995#endif
1996
1997	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1998
1999	      seq = get_insns ();
2000	      end_sequence ();
2001	      emit_insn_after (seq, map->insns_at_start);
2002	      return temp;
2003	    }
2004	  else if (REG_FUNCTION_VALUE_P (orig))
2005	    {
2006	      /* This is a reference to the function return value.  If
2007		 the function doesn't have a return value, error.  If the
2008		 mode doesn't agree, and it ain't BLKmode, make a SUBREG.  */
2009	      if (map->inline_target == 0)
2010		{
2011		  if (rtx_equal_function_value_matters)
2012		    /* This is an ignored return value.  We must not
2013		       leave it in with REG_FUNCTION_VALUE_P set, since
2014		       that would confuse subsequent inlining of the
2015		       current function into a later function.  */
2016		    return gen_rtx_REG (GET_MODE (orig), regno);
2017		  else
2018		    /* Must be unrolling loops or replicating code if we
2019		       reach here, so return the register unchanged.  */
2020		    return orig;
2021		}
2022	      else if (GET_MODE (map->inline_target) != BLKmode
2023		       && mode != GET_MODE (map->inline_target))
2024		return gen_lowpart (mode, map->inline_target);
2025	      else
2026		return map->inline_target;
2027	    }
2028#if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
2029	  /* If leaf_renumber_regs_insn() might remap this register to
2030	     some other number, make sure we don't share it with the
2031	     inlined function, otherwise delayed optimization of the
2032	     inlined function may change it in place, breaking our
2033	     reference to it.  We may still share it within the
2034	     function, so create an entry for this register in the
2035	     reg_map.  */
2036	  if (map->integrating && regno < FIRST_PSEUDO_REGISTER
2037	      && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
2038	    {
2039	      if (!map->leaf_reg_map[regno][mode])
2040		map->leaf_reg_map[regno][mode] = gen_rtx_REG (mode, regno);
2041	      return map->leaf_reg_map[regno][mode];
2042	    }
2043#endif
2044	  else
2045	    return orig;
2046
2047	  abort ();
2048	}
2049      if (map->reg_map[regno] == NULL)
2050	{
2051	  map->reg_map[regno] = gen_reg_rtx (mode);
2052	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2053	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2054	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2055	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */
2056
2057	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
2058	    mark_reg_pointer (map->reg_map[regno],
2059			      map->regno_pointer_align[regno]);
2060	}
2061      return map->reg_map[regno];
2062
2063    case SUBREG:
2064      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
2065      return simplify_gen_subreg (GET_MODE (orig), copy,
2066				  GET_MODE (SUBREG_REG (orig)),
2067				  SUBREG_BYTE (orig));
2068
2069    case ADDRESSOF:
2070      copy = gen_rtx_ADDRESSOF (mode,
2071				copy_rtx_and_substitute (XEXP (orig, 0),
2072							 map, for_lhs),
2073				0, ADDRESSOF_DECL (orig));
2074      regno = ADDRESSOF_REGNO (orig);
2075      if (map->reg_map[regno])
2076	regno = REGNO (map->reg_map[regno]);
2077      else if (regno > LAST_VIRTUAL_REGISTER)
2078	{
2079	  temp = XEXP (orig, 0);
2080	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2081	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2082	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2083	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2084	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */
2085
2086	  /* Objects may initially be represented as registers, but
2087	     may be turned into a MEM if their address is taken by
2088	     put_var_into_stack.  Therefore, the register table may have
2089	     entries which are MEMs.
2090
2091	     We briefly tried to clear such entries, but that ended up
2092	     cascading into many changes due to the optimizers not being
2093	     prepared for empty entries in the register table.  So we've
2094	     decided to allow the MEMs in the register table for now.  */
2095	  if (REG_P (map->x_regno_reg_rtx[regno])
2096	      && REG_POINTER (map->x_regno_reg_rtx[regno]))
2097	    mark_reg_pointer (map->reg_map[regno],
2098			      map->regno_pointer_align[regno]);
2099	  regno = REGNO (map->reg_map[regno]);
2100	}
2101      ADDRESSOF_REGNO (copy) = regno;
2102      return copy;
2103
2104    case USE:
2105    case CLOBBER:
2106      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2107	 to (use foo) if the original insn didn't have a subreg.
2108	 Removing the subreg distorts the VAX movstrhi pattern
2109	 by changing the mode of an operand.  */
2110      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
2111      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2112	copy = SUBREG_REG (copy);
2113      return gen_rtx_fmt_e (code, VOIDmode, copy);
2114
2115    /* We need to handle "deleted" labels that appear in the DECL_RTL
2116       of a LABEL_DECL.  */
2117    case NOTE:
2118      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
2119	break;
2120
2121      /* ... FALLTHRU ...  */
2122    case CODE_LABEL:
2123      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2124	= LABEL_PRESERVE_P (orig);
2125      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2126
2127    case LABEL_REF:
2128      copy
2129	= gen_rtx_LABEL_REF
2130	  (mode,
2131	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2132	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
2133
2134      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2135
2136      /* The fact that this label was previously nonlocal does not mean
2137	 it still is, so we must check if it is within the range of
2138	 this function's labels.  */
2139      LABEL_REF_NONLOCAL_P (copy)
2140	= (LABEL_REF_NONLOCAL_P (orig)
2141	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2142		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2143
2144      /* If we have made a nonlocal label local, it means that this
2145	 inlined call will be referring to our nonlocal goto handler.
2146	 So make sure we create one for this block; we normally would
2147	 not since this is not otherwise considered a "call".  */
2148      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2149	function_call_count++;
2150
2151      return copy;
2152
2153    case PC:
2154    case CC0:
2155    case CONST_INT:
2156    case CONST_VECTOR:
2157      return orig;
2158
2159    case SYMBOL_REF:
2160      /* Symbols which represent the address of a label stored in the constant
2161	 pool must be modified to point to a constant pool entry for the
2162	 remapped label.  Otherwise, symbols are returned unchanged.  */
2163      if (CONSTANT_POOL_ADDRESS_P (orig))
2164	{
2165	  struct function *f = inlining ? inlining : cfun;
2166	  rtx constant = get_pool_constant_for_function (f, orig);
2167	  enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
2168	  if (inlining)
2169	    {
2170	      rtx temp = force_const_mem (const_mode,
2171					  copy_rtx_and_substitute (constant,
2172								   map, 0));
2173
2174#if 0
2175	      /* Legitimizing the address here is incorrect.
2176
2177		 Since we had a SYMBOL_REF before, we can assume it is valid
2178		 to have one in this position in the insn.
2179
2180		 Also, change_address may create new registers.  These
2181		 registers will not have valid reg_map entries.  This can
2182		 cause try_constants() to fail because it assumes that all
2183		 registers in the rtx have valid reg_map entries, and it may
2184		 end up replacing one of these new registers with junk.  */
2185
2186	      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2187		temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2188#endif
2189
2190	      temp = XEXP (temp, 0);
2191
2192#ifdef POINTERS_EXTEND_UNSIGNED
2193	      if (GET_MODE (temp) != GET_MODE (orig))
2194		temp = convert_memory_address (GET_MODE (orig), temp);
2195#endif
2196	      return temp;
2197	    }
2198	  else if (GET_CODE (constant) == LABEL_REF)
2199	    return XEXP (force_const_mem
2200			 (GET_MODE (orig),
2201			  copy_rtx_and_substitute (constant, map, for_lhs)),
2202			 0);
2203	}
2204
2205      return orig;
2206
2207    case CONST_DOUBLE:
2208      /* We have to make a new copy of this CONST_DOUBLE because we don't
2209	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
2210	 duplicate of a CONST_DOUBLE we have already seen.  */
2211      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2212	{
2213	  REAL_VALUE_TYPE d;
2214
2215	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2216	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2217	}
2218      else
2219	return immed_double_const (CONST_DOUBLE_LOW (orig),
2220				   CONST_DOUBLE_HIGH (orig), VOIDmode);
2221
2222    case CONST:
2223      /* Make new constant pool entry for a constant
2224	 that was in the pool of the inline function.  */
2225      if (RTX_INTEGRATED_P (orig))
2226	abort ();
2227      break;
2228
2229    case ASM_OPERANDS:
2230      /* If a single asm insn contains multiple output operands then
2231	 it contains multiple ASM_OPERANDS rtx's that share the input
2232	 and constraint vecs.  We must make sure that the copied insn
2233	 continues to share them.  */
2234      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
2235	{
2236	  copy = rtx_alloc (ASM_OPERANDS);
2237	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2238	  PUT_MODE (copy, GET_MODE (orig));
2239	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
2240	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
2241	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
2242	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
2243	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
2244	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
2245	    = map->copy_asm_constraints_vector;
2246	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
2247	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
2248	  return copy;
2249	}
2250      break;
2251
2252    case CALL:
2253      /* This is given special treatment because the first
2254	 operand of a CALL is a (MEM ...) which may get
2255	 forced into a register for cse.  This is undesirable
2256	 if function-address cse isn't wanted or if we won't do cse.  */
2257#ifndef NO_FUNCTION_CSE
2258      if (! (optimize && ! flag_no_function_cse))
2259#endif
2260	{
2261	  rtx copy
2262	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2263			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2264						    map, 0));
2265
2266	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
2267
2268	  return
2269	    gen_rtx_CALL (GET_MODE (orig), copy,
2270			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
2271	}
2272      break;
2273
2274#if 0
2275      /* Must be ifdefed out for loop unrolling to work.  */
2276    case RETURN:
2277      abort ();
2278#endif
2279
2280    case SET:
2281      /* If this is setting fp or ap, it means that we have a nonlocal goto.
2282	 Adjust the setting by the offset of the area we made.
2283	 If the nonlocal goto is into the current function,
2284	 this will result in unnecessarily bad code, but should work.  */
2285      if (SET_DEST (orig) == virtual_stack_vars_rtx
2286	  || SET_DEST (orig) == virtual_incoming_args_rtx)
2287	{
2288	  /* In case a translation hasn't occurred already, make one now.  */
2289	  rtx equiv_reg;
2290	  rtx equiv_loc;
2291	  HOST_WIDE_INT loc_offset;
2292
2293	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
2294	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2295	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
2296					  REGNO (equiv_reg)).rtx;
2297	  loc_offset
2298	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2299
2300	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2301			      force_operand
2302			      (plus_constant
2303			       (copy_rtx_and_substitute (SET_SRC (orig),
2304							 map, 0),
2305				- loc_offset),
2306			       NULL_RTX));
2307	}
2308      else
2309	return gen_rtx_SET (VOIDmode,
2310			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
2311			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
2312      break;
2313
2314    case MEM:
2315      if (inlining
2316	  && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
2317	  && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
2318	{
2319	  enum machine_mode const_mode
2320	    = get_pool_mode_for_function (inlining, XEXP (orig, 0));
2321	  rtx constant
2322	    = get_pool_constant_for_function (inlining, XEXP (orig, 0));
2323
2324	  constant = copy_rtx_and_substitute (constant, map, 0);
2325
2326	  /* If this was an address of a constant pool entry that itself
2327	     had to be placed in the constant pool, it might not be a
2328	     valid address.  So the recursive call might have turned it
2329	     into a register.  In that case, it isn't a constant any
2330	     more, so return it.  This has the potential of changing a
2331	     MEM into a REG, but we'll assume that it is safe.  */
2332	  if (! CONSTANT_P (constant))
2333	    return constant;
2334
2335	  return validize_mem (force_const_mem (const_mode, constant));
2336	}
2337
2338      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
2339							 map, 0));
2340      MEM_COPY_ATTRIBUTES (copy, orig);
2341
2342      /* If inlining and this is not for the LHS, turn off RTX_UNCHANGING_P
2343	 since this may be an indirect reference to a parameter and the
2344	 actual may not be readonly.  */
2345      if (inlining && !for_lhs)
2346	RTX_UNCHANGING_P (copy) = 0;
2347
2348      /* If inlining, squish aliasing data that references the subroutine's
2349	 parameter list, since that's no longer applicable.  */
2350      if (inlining && MEM_EXPR (copy)
2351	  && TREE_CODE (MEM_EXPR (copy)) == INDIRECT_REF
2352	  && TREE_CODE (TREE_OPERAND (MEM_EXPR (copy), 0)) == PARM_DECL)
2353	set_mem_expr (copy, NULL_TREE);
2354
2355      return copy;
2356
2357    default:
2358      break;
2359    }
2360
2361  copy = rtx_alloc (code);
2362  PUT_MODE (copy, mode);
2363  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2364  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2365  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2366
2367  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2368
2369  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2370    {
2371      switch (*format_ptr++)
2372	{
2373	case '0':
2374	  /* Copy this through the wide int field; that's safest.  */
2375	  X0WINT (copy, i) = X0WINT (orig, i);
2376	  break;
2377
2378	case 'e':
2379	  XEXP (copy, i)
2380	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2381	  break;
2382
2383	case 'u':
2384	  /* Change any references to old-insns to point to the
2385	     corresponding copied insns.  */
2386	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2387	  break;
2388
2389	case 'E':
2390	  XVEC (copy, i) = XVEC (orig, i);
2391	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2392	    {
2393	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2394	      for (j = 0; j < XVECLEN (copy, i); j++)
2395		XVECEXP (copy, i, j)
2396		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2397					     map, for_lhs);
2398	    }
2399	  break;
2400
2401	case 'w':
2402	  XWINT (copy, i) = XWINT (orig, i);
2403	  break;
2404
2405	case 'i':
2406	  XINT (copy, i) = XINT (orig, i);
2407	  break;
2408
2409	case 's':
2410	  XSTR (copy, i) = XSTR (orig, i);
2411	  break;
2412
2413	case 't':
2414	  XTREE (copy, i) = XTREE (orig, i);
2415	  break;
2416
2417	default:
2418	  abort ();
2419	}
2420    }
2421
2422  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2423    {
2424      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
2425      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
2426      map->copy_asm_constraints_vector
2427	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
2428    }
2429
2430  return copy;
2431}
2432
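/* A toy sketch (hypothetical node type, not GCC code) of the
   format-driven operand walk at the end of copy_rtx_and_substitute
   above: a per-code format string says what lives in each operand
   slot, so a single generic loop can copy every node shape.  Assumes
   <stdlib.h> for malloc.  */
#if 0
struct nd { const char *fmt; struct nd *op[2]; int val[2]; };

static struct nd *
copy_by_format (const struct nd *orig)
{
  struct nd *copy = (struct nd *) malloc (sizeof *copy);
  int i;

  *copy = *orig;		/* Scalar fields are copied wholesale.  */
  for (i = 0; orig->fmt[i]; i++)
    if (orig->fmt[i] == 'e' && orig->op[i])
      copy->op[i] = copy_by_format (orig->op[i]);	/* Recurse.  */
  return copy;
}
#endif
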
2433/* Substitute known constant values into INSN, if that is valid.  */
2434
2435void
2436try_constants (insn, map)
2437     rtx insn;
2438     struct inline_remap *map;
2439{
2440  int i;
2441
2442  map->num_sets = 0;
2443
2444  /* First try just updating addresses, then other things.  This is
2445     important when we have something like the store of a constant
2446     into memory and we can update the memory address but the machine
2447     does not support a constant source.  */
2448  subst_constants (&PATTERN (insn), insn, map, 1);
2449  apply_change_group ();
2450  subst_constants (&PATTERN (insn), insn, map, 0);
2451  apply_change_group ();
2452
2453  /* Show we don't know the value of anything stored or clobbered.  */
2454  note_stores (PATTERN (insn), mark_stores, NULL);
2455  map->last_pc_value = 0;
2456#ifdef HAVE_cc0
2457  map->last_cc0_value = 0;
2458#endif
2459
2460  /* Set up any constant equivalences made in this insn.  */
2461  for (i = 0; i < map->num_sets; i++)
2462    {
2463      if (GET_CODE (map->equiv_sets[i].dest) == REG)
2464	{
2465	  int regno = REGNO (map->equiv_sets[i].dest);
2466
2467	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2468	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2469	      /* The following clause is a hack to make the case work where
2470		 GNU C++ reassigns a variable to make cse work right.  */
2471	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2472						    regno).rtx,
2473				map->equiv_sets[i].equiv))
2474	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2475				  map->equiv_sets[i].equiv, map->const_age);
2476	}
2477      else if (map->equiv_sets[i].dest == pc_rtx)
2478	map->last_pc_value = map->equiv_sets[i].equiv;
2479#ifdef HAVE_cc0
2480      else if (map->equiv_sets[i].dest == cc0_rtx)
2481	map->last_cc0_value = map->equiv_sets[i].equiv;
2482#endif
2483    }
2484}
2485
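/* A sketch (hypothetical entry type) of the aging scheme around
   const_age: try_constants above records equivalences stamped with the
   current age, and subst_constants below trusts an entry only if its
   stamp is not older than the current age, so equivalences lapse
   automatically when const_age is bumped at labels (calls instead
   clear the entries outright).  */
#if 0
struct equiv_entry { void *equiv_rtx; unsigned int age; };

static int
equiv_usable_p (const struct equiv_entry *p, unsigned int const_age)
{
  /* Usable only if recorded and not stale.  */
  return p->equiv_rtx != 0 && p->age >= const_age;
}
#endif
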
2486/* Substitute known constants for pseudo regs in the contents of LOC,
2487   which are part of INSN.
2488   If INSN is zero, the substitution should always be done (this is used to
2489   update DECL_RTL).
2490   These changes are taken out by try_constants if the result is not valid.
2491
2492   Note that we are more concerned with determining when the result of a SET
2493   is a constant, for further propagation, than actually inserting constants
2494   into insns; cse will do the latter task better.
2495
2496   This function is also used to adjust the address of items previously addressed
2497   via the virtual stack variable or virtual incoming arguments registers.
2498
2499   If MEMONLY is nonzero, only make changes inside a MEM.  */
2500
2501static void
2502subst_constants (loc, insn, map, memonly)
2503     rtx *loc;
2504     rtx insn;
2505     struct inline_remap *map;
2506     int memonly;
2507{
2508  rtx x = *loc;
2509  int i, j;
2510  enum rtx_code code;
2511  const char *format_ptr;
2512  int num_changes = num_validated_changes ();
2513  rtx new = 0;
2514  enum machine_mode op0_mode = MAX_MACHINE_MODE;
2515
2516  code = GET_CODE (x);
2517
2518  switch (code)
2519    {
2520    case PC:
2521    case CONST_INT:
2522    case CONST_DOUBLE:
2523    case CONST_VECTOR:
2524    case SYMBOL_REF:
2525    case CONST:
2526    case LABEL_REF:
2527    case ADDRESS:
2528      return;
2529
2530#ifdef HAVE_cc0
2531    case CC0:
2532      if (! memonly)
2533	validate_change (insn, loc, map->last_cc0_value, 1);
2534      return;
2535#endif
2536
2537    case USE:
2538    case CLOBBER:
2539      /* The only thing we can do with a USE or CLOBBER is possibly do
2540	 some substitutions in a MEM within it.  */
2541      if (GET_CODE (XEXP (x, 0)) == MEM)
2542	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2543      return;
2544
2545    case REG:
2546      /* Substitute for parms and known constants.  Don't replace
2547	 hard regs used as user variables with constants.  */
2548      if (! memonly)
2549	{
2550	  int regno = REGNO (x);
2551	  struct const_equiv_data *p;
2552
2553	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2554	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2555	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2556		  p->rtx != 0)
2557	      && p->age >= map->const_age)
2558	    validate_change (insn, loc, p->rtx, 1);
2559	}
2560      return;
2561
2562    case SUBREG:
2563      /* SUBREG applied to something other than a reg
2564	 should be treated as ordinary, since that must
2565	 be a special hack and we don't know how to treat it specially.
2566	 Consider for example mulsidi3 in m68k.md.
2567	 Ordinary SUBREG of a REG needs this special treatment.  */
2568      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2569	{
2570	  rtx inner = SUBREG_REG (x);
2571	  rtx new = 0;
2572
2573	  /* We can't call subst_constants on &SUBREG_REG (x) because any
2574	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
2575	     see what is inside, try to form the new SUBREG and see if that is
2576	     valid.  We handle two cases: extracting a full word in an
2577	     integral mode and extracting the low part.  */
2578	  subst_constants (&inner, NULL_RTX, map, 0);
2579	  new = simplify_gen_subreg (GET_MODE (x), inner,
2580			 	     GET_MODE (SUBREG_REG (x)),
2581				     SUBREG_BYTE (x));
2582
2583	  if (new)
2584	    validate_change (insn, loc, new, 1);
2585	  else
2586	    cancel_changes (num_changes);
2587
2588	  return;
2589	}
2590      break;
2591
2592    case MEM:
2593      subst_constants (&XEXP (x, 0), insn, map, 0);
2594
2595      /* If a memory address got spoiled, change it back.  */
2596      if (! memonly && insn != 0 && num_validated_changes () != num_changes
2597	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2598	cancel_changes (num_changes);
2599      return;
2600
2601    case SET:
2602      {
2603	/* Substitute constants in our source, and in any arguments to a
2604	   complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2605	   itself.  */
2606	rtx *dest_loc = &SET_DEST (x);
2607	rtx dest = *dest_loc;
2608	rtx src, tem;
2609	enum machine_mode compare_mode = VOIDmode;
2610
2611	/* If SET_SRC is a COMPARE which subst_constants would turn into
2612	   COMPARE of 2 VOIDmode constants, note the mode in which comparison
2613	   is to be done.  */
2614	if (GET_CODE (SET_SRC (x)) == COMPARE)
2615	  {
2616	    src = SET_SRC (x);
2617	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2618#ifdef HAVE_cc0
2619		|| dest == cc0_rtx
2620#endif
2621		)
2622	      {
2623		compare_mode = GET_MODE (XEXP (src, 0));
2624		if (compare_mode == VOIDmode)
2625		  compare_mode = GET_MODE (XEXP (src, 1));
2626	      }
2627	  }
2628
2629	subst_constants (&SET_SRC (x), insn, map, memonly);
2630	src = SET_SRC (x);
2631
2632	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2633	       || GET_CODE (*dest_loc) == SUBREG
2634	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2635	  {
2636	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2637	      {
2638		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2639		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2640	      }
2641	    dest_loc = &XEXP (*dest_loc, 0);
2642	  }
2643
2644	/* Do substitute in the address of a destination in memory.  */
2645	if (GET_CODE (*dest_loc) == MEM)
2646	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2647
2648	/* Check for the case where DEST is a SUBREG, both it and the underlying
2649	   register are no wider than one word, and the SUBREG has the wider mode.
2650	   In that case, we are really setting the underlying register to the
2651	   source converted to the mode of DEST.  So indicate that.  */
2652	if (GET_CODE (dest) == SUBREG
2653	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2654	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2655	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2656		      <= GET_MODE_SIZE (GET_MODE (dest)))
2657	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2658					       src)))
2659	  src = tem, dest = SUBREG_REG (dest);
2660
2661	/* If storing a recognizable value, save it for later recording.  */
2662	if ((map->num_sets < MAX_RECOG_OPERANDS)
2663	    && (CONSTANT_P (src)
2664		|| (GET_CODE (src) == REG
2665		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2666			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2667		|| (GET_CODE (src) == PLUS
2668		    && GET_CODE (XEXP (src, 0)) == REG
2669		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2670			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2671		    && CONSTANT_P (XEXP (src, 1)))
2672		|| GET_CODE (src) == COMPARE
2673#ifdef HAVE_cc0
2674		|| dest == cc0_rtx
2675#endif
2676		|| (dest == pc_rtx
2677		    && (src == pc_rtx || GET_CODE (src) == RETURN
2678			|| GET_CODE (src) == LABEL_REF))))
2679	  {
2680	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
2681	       it will cause us to save the COMPARE with any constants
2682	       substituted, which is what we want for later.  */
2683	    rtx src_copy = copy_rtx (src);
2684	    map->equiv_sets[map->num_sets].equiv = src_copy;
2685	    map->equiv_sets[map->num_sets++].dest = dest;
2686	    if (compare_mode != VOIDmode
2687		&& GET_CODE (src) == COMPARE
2688		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
2689#ifdef HAVE_cc0
2690		    || dest == cc0_rtx
2691#endif
2692		    )
2693		&& GET_MODE (XEXP (src, 0)) == VOIDmode
2694		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
2695	      {
2696		map->compare_src = src_copy;
2697		map->compare_mode = compare_mode;
2698	      }
2699	  }
2700      }
2701      return;
2702
2703    default:
2704      break;
2705    }
2706
2707  format_ptr = GET_RTX_FORMAT (code);
2708
2709  /* If the first operand is an expression, save its mode for later.  */
2710  if (*format_ptr == 'e')
2711    op0_mode = GET_MODE (XEXP (x, 0));
2712
2713  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2714    {
2715      switch (*format_ptr++)
2716	{
2717	case '0':
2718	  break;
2719
2720	case 'e':
2721	  if (XEXP (x, i))
2722	    subst_constants (&XEXP (x, i), insn, map, memonly);
2723	  break;
2724
2725	case 'u':
2726	case 'i':
2727	case 's':
2728	case 'w':
2729	case 'n':
2730	case 't':
2731	case 'B':
2732	  break;
2733
2734	case 'E':
2735	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2736	    for (j = 0; j < XVECLEN (x, i); j++)
2737	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2738
2739	  break;
2740
2741	default:
2742	  abort ();
2743	}
2744    }
2745
2746  /* If this is a commutative operation, move a constant to the second
2747     operand unless the second operand is already a CONST_INT.  */
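  /* For example, (plus (const_int 4) (reg 60)) would become
     (plus (reg 60) (const_int 4)); the register number is
     illustrative.  */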
2748  if (! memonly
2749      && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2750      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2751    {
2752      rtx tem = XEXP (x, 0);
2753      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2754      validate_change (insn, &XEXP (x, 1), tem, 1);
2755    }
2756
2757  /* Simplify the expression in case we put in some constants.  */
2758  if (! memonly)
2759    switch (GET_RTX_CLASS (code))
2760      {
2761      case '1':
2762	if (op0_mode == MAX_MACHINE_MODE)
2763	  abort ();
2764	new = simplify_unary_operation (code, GET_MODE (x),
2765					XEXP (x, 0), op0_mode);
2766	break;
2767
2768      case '<':
2769	{
2770	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2771
2772	  if (op_mode == VOIDmode)
2773	    op_mode = GET_MODE (XEXP (x, 1));
2774	  new = simplify_relational_operation (code, op_mode,
2775					       XEXP (x, 0), XEXP (x, 1));
2776#ifdef FLOAT_STORE_FLAG_VALUE
2777	  if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2778	    {
2779	      enum machine_mode mode = GET_MODE (x);
2780	      if (new == const0_rtx)
2781		new = CONST0_RTX (mode);
2782	      else
2783		{
2784		  REAL_VALUE_TYPE val;
2785
2786		  /* Avoid automatic aggregate initialization.  */
2787		  val = FLOAT_STORE_FLAG_VALUE (mode);
2788		  new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2789		}
2790	    }
2791#endif
2792	  break;
2793	}
2794
2795      case '2':
2796      case 'c':
2797	new = simplify_binary_operation (code, GET_MODE (x),
2798					 XEXP (x, 0), XEXP (x, 1));
2799	break;
2800
2801      case 'b':
2802      case '3':
2803	if (op0_mode == MAX_MACHINE_MODE)
2804	  abort ();
2805
2806	if (code == IF_THEN_ELSE)
2807	  {
2808	    rtx op0 = XEXP (x, 0);
2809
2810	    if (GET_RTX_CLASS (GET_CODE (op0)) == '<'
2811		&& GET_MODE (op0) == VOIDmode
2812		&& ! side_effects_p (op0)
2813		&& XEXP (op0, 0) == map->compare_src
2814		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
2815	      {
2816		/* We have a compare of two VOIDmode constants for which
2817		   we recorded the comparison mode.  */
2818		rtx temp =
2819		  simplify_relational_operation (GET_CODE (op0),
2820						 map->compare_mode,
2821						 XEXP (op0, 0),
2822						 XEXP (op0, 1));
2823
2824		if (temp == const0_rtx)
2825		  new = XEXP (x, 2);
2826		else if (temp == const1_rtx)
2827		  new = XEXP (x, 1);
2828	      }
2829	  }
2830	if (!new)
2831	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2832					    XEXP (x, 0), XEXP (x, 1),
2833					    XEXP (x, 2));
2834	break;
2835      }
2836
2837  if (new)
2838    validate_change (insn, loc, new, 1);
2839}
2840
2841/* Show that registers modified no longer contain known constants.  We are
2842   called from note_stores with parts of the new insn.  */
2843
2844static void
2845mark_stores (dest, x, data)
2846     rtx dest;
2847     rtx x ATTRIBUTE_UNUSED;
2848     void *data ATTRIBUTE_UNUSED;
2849{
2850  int regno = -1;
2851  enum machine_mode mode = VOIDmode;
2852
2853  /* DEST is always the innermost thing set, except in the case of
2854     SUBREGs of hard registers.  */
2855
2856  if (GET_CODE (dest) == REG)
2857    regno = REGNO (dest), mode = GET_MODE (dest);
2858  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2859    {
2860      regno = REGNO (SUBREG_REG (dest));
2861      if (regno < FIRST_PSEUDO_REGISTER)
2862	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
2863				      GET_MODE (SUBREG_REG (dest)),
2864				      SUBREG_BYTE (dest),
2865				      GET_MODE (dest));
2866      mode = GET_MODE (SUBREG_REG (dest));
2867    }
2868
2869  if (regno >= 0)
2870    {
2871      unsigned int uregno = regno;
2872      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
2873			       : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
2874      unsigned int i;
2875
2876      /* Ignore virtual stack var or virtual arg register since those
2877	 are handled separately.  */
2878      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
2879	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
2880	for (i = uregno; i <= last_reg; i++)
2881	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2882	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2883    }
2884}
2885
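/* A sketch of the invalidation performed above (hypothetical table):
   a store into a hard register in a multi-word mode clobbers a span of
   consecutive registers, so the recorded value of every register in
   the span must be dropped.  */
#if 0
static void
invalidate_span (void **known_values, int table_size, int regno, int nregs)
{
  int i;

  for (i = regno; i < regno + nregs && i < table_size; i++)
    known_values[i] = 0;
}
#endif
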
2886/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2887   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2888   that it points to the node itself, thus indicating that the node is its
2889   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2890   the given node is NULL, recursively descend the decl/block tree which
2891   it is the root of, and for each other ..._DECL or BLOCK node contained
2892   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2893   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2894   values to point to themselves.  */
2895
2896static void
2897set_block_origin_self (stmt)
2898     tree stmt;
2899{
2900  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2901    {
2902      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2903
2904      {
2905	tree local_decl;
2906
2907	for (local_decl = BLOCK_VARS (stmt);
2908	     local_decl != NULL_TREE;
2909	     local_decl = TREE_CHAIN (local_decl))
2910	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
2911      }
2912
2913      {
2914	tree subblock;
2915
2916	for (subblock = BLOCK_SUBBLOCKS (stmt);
2917	     subblock != NULL_TREE;
2918	     subblock = BLOCK_CHAIN (subblock))
2919	  set_block_origin_self (subblock);	/* Recurse.  */
2920      }
2921    }
2922}
2923
2924/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2925   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2926   node so that it points to the node itself, thus indicating that the
2927   node represents its own (abstract) origin.  Additionally, if the
2928   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2929   the decl/block tree of which the given node is the root, and for
2930   each other ..._DECL or BLOCK node contained therein whose
2931   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2932   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2933   point to themselves.  */
2934
2935void
2936set_decl_origin_self (decl)
2937     tree decl;
2938{
2939  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2940    {
2941      DECL_ABSTRACT_ORIGIN (decl) = decl;
2942      if (TREE_CODE (decl) == FUNCTION_DECL)
2943	{
2944	  tree arg;
2945
2946	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2947	    DECL_ABSTRACT_ORIGIN (arg) = arg;
2948	  if (DECL_INITIAL (decl) != NULL_TREE
2949	      && DECL_INITIAL (decl) != error_mark_node)
2950	    set_block_origin_self (DECL_INITIAL (decl));
2951	}
2952    }
2953}
2954
2955/* Given a pointer to some BLOCK node, and a boolean value to set the
2956   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2957   the given block, and for all local decls and all local sub-blocks
2958   (recursively) which are contained therein.  */
2959
2960static void
2961set_block_abstract_flags (stmt, setting)
2962     tree stmt;
2963     int setting;
2964{
2965  tree local_decl;
2966  tree subblock;
2967
2968  BLOCK_ABSTRACT (stmt) = setting;
2969
2970  for (local_decl = BLOCK_VARS (stmt);
2971       local_decl != NULL_TREE;
2972       local_decl = TREE_CHAIN (local_decl))
2973    set_decl_abstract_flags (local_decl, setting);
2974
2975  for (subblock = BLOCK_SUBBLOCKS (stmt);
2976       subblock != NULL_TREE;
2977       subblock = BLOCK_CHAIN (subblock))
2978    set_block_abstract_flags (subblock, setting);
2979}
2980
2981/* Given a pointer to some ..._DECL node, and a boolean value to set the
2982   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2983   given decl, and (in the case where the decl is a FUNCTION_DECL) also
2984   set the abstract flags for all of the parameters, local vars, local
2985   blocks and sub-blocks (recursively) to the same setting.  */
2986
2987void
2988set_decl_abstract_flags (decl, setting)
2989     tree decl;
2990     int setting;
2991{
2992  DECL_ABSTRACT (decl) = setting;
2993  if (TREE_CODE (decl) == FUNCTION_DECL)
2994    {
2995      tree arg;
2996
2997      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2998	DECL_ABSTRACT (arg) = setting;
2999      if (DECL_INITIAL (decl) != NULL_TREE
3000	  && DECL_INITIAL (decl) != error_mark_node)
3001	set_block_abstract_flags (DECL_INITIAL (decl), setting);
3002    }
3003}
3004
3005/* Output the assembly language code for the function FNDECL
3006   from its DECL_SAVED_INSNS.  Used for inline functions that are output
3007   at end of compilation instead of where they came in the source.  */
3008
3009static GTY(()) struct function *old_cfun;
3010
3011void
3012output_inline_function (fndecl)
3013     tree fndecl;
3014{
3015  enum debug_info_type old_write_symbols = write_symbols;
3016  const struct gcc_debug_hooks *const old_debug_hooks = debug_hooks;
3017  struct function *f = DECL_SAVED_INSNS (fndecl);
3018
3019  old_cfun = cfun;
3020  cfun = f;
3021  current_function_decl = fndecl;
3022
3023  set_new_last_label_num (f->inl_max_label_num);
3024
3025  /* We're not deferring this any longer.  */
3026  DECL_DEFER_OUTPUT (fndecl) = 0;
3027
3028  /* If requested, suppress debugging information.  */
3029  if (f->no_debugging_symbols)
3030    {
3031      write_symbols = NO_DEBUG;
3032      debug_hooks = &do_nothing_debug_hooks;
3033    }
3034
3035  /* Make sure warnings emitted by the optimizers (e.g. control reaches
3036     end of non-void function) are not wildly incorrect.  */
3037  input_filename = DECL_SOURCE_FILE (fndecl);
3038  lineno = DECL_SOURCE_LINE (fndecl);
3039
3040  /* Compile this function all the way down to assembly code.  As a
3041     side effect this destroys the saved RTL representation, but
3042     that's okay, because we don't need to inline this anymore.  */
3043  rest_of_compilation (fndecl);
3044  DECL_INLINE (fndecl) = 0;
3045
3046  cfun = old_cfun;
3047  current_function_decl = old_cfun ? old_cfun->decl : 0;
3048  write_symbols = old_write_symbols;
3049  debug_hooks = old_debug_hooks;
3050}
3051
3052
3053/* Functions to keep track of the values hard regs had at the start of
3054   the function.  */
3055
3056rtx
3057get_hard_reg_initial_reg (fun, reg)
3058     struct function *fun;
3059     rtx reg;
3060{
3061  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3062  int i;
3063
3064  if (ivs == 0)
3065    return NULL_RTX;
3066
3067  for (i = 0; i < ivs->num_entries; i++)
3068    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
3069      return ivs->entries[i].hard_reg;
3070
3071  return NULL_RTX;
3072}
3073
3074rtx
3075has_func_hard_reg_initial_val (fun, reg)
3076     struct function *fun;
3077     rtx reg;
3078{
3079  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3080  int i;
3081
3082  if (ivs == 0)
3083    return NULL_RTX;
3084
3085  for (i = 0; i < ivs->num_entries; i++)
3086    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
3087      return ivs->entries[i].pseudo;
3088
3089  return NULL_RTX;
3090}
3091
3092rtx
3093get_func_hard_reg_initial_val (fun, reg)
3094     struct function *fun;
3095     rtx reg;
3096{
3097  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
3098  rtx rv = has_func_hard_reg_initial_val (fun, reg);
3099
3100  if (rv)
3101    return rv;
3102
3103  if (ivs == 0)
3104    {
3105      fun->hard_reg_initial_vals = (void *) ggc_alloc (sizeof (initial_value_struct));
3106      ivs = fun->hard_reg_initial_vals;
3107      ivs->num_entries = 0;
3108      ivs->max_entries = 5;
3109      ivs->entries = (initial_value_pair *) ggc_alloc (5 * sizeof (initial_value_pair));
3110    }
3111
3112  if (ivs->num_entries >= ivs->max_entries)
3113    {
3114      ivs->max_entries += 5;
3115      ivs->entries =
3116	(initial_value_pair *) ggc_realloc (ivs->entries,
3117					    ivs->max_entries
3118					    * sizeof (initial_value_pair));
3119    }
3120
3121  ivs->entries[ivs->num_entries].hard_reg = reg;
3122  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
3123
3124  return ivs->entries[ivs->num_entries++].pseudo;
3125}
3126
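/* A sketch (hypothetical types) of the lazily-created, grow-by-5 table
   maintained above: look the key up, and if absent append a fresh
   entry, enlarging the array in the same increments of 5 used for
   max_entries.  A zeroed table works as the initial state, since
   realloc of a null pointer allocates.  Assumes <stdlib.h>.  */
#if 0
struct iv_pair { int key; int value; };
struct iv_table { int num; int max; struct iv_pair *entries; };

static int
lookup_or_insert (struct iv_table *t, int key, int fresh_value)
{
  int i;

  for (i = 0; i < t->num; i++)
    if (t->entries[i].key == key)
      return t->entries[i].value;

  if (t->num >= t->max)
    {
      t->max += 5;	/* Same growth increment as max_entries above.  */
      t->entries = (struct iv_pair *)
	realloc (t->entries, t->max * sizeof (struct iv_pair));
    }

  t->entries[t->num].key = key;
  t->entries[t->num].value = fresh_value;
  return t->entries[t->num++].value;
}
#endif
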
3127rtx
3128get_hard_reg_initial_val (mode, regno)
3129     enum machine_mode mode;
3130     int regno;
3131{
3132  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3133}
3134
3135rtx
3136has_hard_reg_initial_val (mode, regno)
3137     enum machine_mode mode;
3138     int regno;
3139{
3140  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
3141}
3142
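/* Hypothetical usage sketch: a target needing the value some hard
   register had on function entry (LINK_REGNUM is a made-up placeholder)
   can ask for the tracking pseudo like this; emit_initial_value_sets
   below then emits the pseudo-from-hard-reg copies at the start of the
   function.  */
#if 0
static rtx
record_entry_link_reg ()
{
  return get_hard_reg_initial_val (Pmode, LINK_REGNUM);
}
#endif
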
3143static void
3144setup_initial_hard_reg_value_integration (inl_f, remap)
3145     struct function *inl_f;
3146     struct inline_remap *remap;
3147{
3148  struct initial_value_struct *ivs = inl_f->hard_reg_initial_vals;
3149  int i;
3150
3151  if (ivs == 0)
3152    return;
3153
3154  for (i = 0; i < ivs->num_entries; i ++)
3155    remap->reg_map[REGNO (ivs->entries[i].pseudo)]
3156      = get_func_hard_reg_initial_val (cfun, ivs->entries[i].hard_reg);
3157}
3158
3159
3160void
3161emit_initial_value_sets ()
3162{
3163  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3164  int i;
3165  rtx seq;
3166
3167  if (ivs == 0)
3168    return;
3169
3170  start_sequence ();
3171  for (i = 0; i < ivs->num_entries; i++)
3172    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
3173  seq = get_insns ();
3174  end_sequence ();
3175
3176  emit_insn_after (seq, get_insns ());
3177}
3178
3179/* If the backend knows where to allocate pseudos for hard
3180   register initial values, register these allocations now.  */
3181void
3182allocate_initial_values (reg_equiv_memory_loc)
3183     rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED;
3184{
3185#ifdef ALLOCATE_INITIAL_VALUE
3186  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
3187  int i;
3188
3189  if (ivs == 0)
3190    return;
3191
3192  for (i = 0; i < ivs->num_entries; i++)
3193    {
3194      int regno = REGNO (ivs->entries[i].pseudo);
3195      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
3196
3197      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
3198	; /* Do nothing.  */
3199      else if (GET_CODE (x) == MEM)
3200	reg_equiv_memory_loc[regno] = x;
3201      else if (GET_CODE (x) == REG)
3202	{
3203	  reg_renumber[regno] = REGNO (x);
3204	  /* Poke the regno right into regno_reg_rtx
3205	     so that even fixed regs are accepted.  */
3206	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
3207	}
3208      else abort ();
3209    }
3210#endif
3211}
3212
3213#include "gt-integrate.h"
3214