function.c revision 117404
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

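/* A rough sketch of how a front end drives this file.  This outline is
   illustrative only; the exact signatures of the expand_* entry points
   and the surrounding bookkeeping vary by version:

	expand_function_start (fndecl, 0);
	... generate RTL for the body; a local needing a frame slot:
	      rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
	... if a pseudo-reg variable's address is later taken:
	      put_var_into_stack (decl, 1);
	expand_function_end (input_filename, lineno, 0);  */
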
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

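/* For example, with ALIGN == 4: FLOOR_ROUND (7, 4) == 4, FLOOR_ROUND (8, 4)
   == 8, and CEIL_ROUND (7, 4) == 8.  The bit-masking also behaves correctly
   for negative values on two's-complement machines, e.g.
   FLOOR_ROUND (-1, 4) == -4, which is why division is avoided above.  */
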
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't contain any instructions
   that can throw an exception.  This is set prior to final.  */

int current_function_nothrow;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static int funcdef_no;

/* This variable holds a pointer to a function that creates target
   specific, per-function data structures.  */
struct machine_function * (*init_machine_status) PARAMS ((void));

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static GTY(()) varray_type prologue;
static GTY(()) varray_type epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static GTY(()) varray_type sibcall_epilogue;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

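/* A hedged sketch of the nesting discipline described above, using the
   entry points defined later in this file; the statement expanders do
   the equivalent of this around each statement:

	push_temp_slots ();
	rtx tmp = assign_stack_temp (DImode, 8, 0);
	... use TMP; preserve_temp_slots (result) keeps RESULT alive
	    past this level ...
	free_temp_slots ();
	pop_temp_slots ();  */
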
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement GTY(())
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
					 int, struct function *));
static struct temp_slot *find_temp_slot_from_address  PARAMS ((rtx));
static void put_reg_into_stack	PARAMS ((struct function *, rtx, tree,
					 enum machine_mode, enum machine_mode,
					 int, unsigned int, int,
					 htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
					     enum machine_mode,
					     htab_t));
static void fixup_var_refs	PARAMS ((rtx, enum machine_mode, int, rtx,
					 htab_t));
static struct fixup_replacement
  *find_fixup_replacement	PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
					  int, int, rtx));
static void fixup_var_refs_insns_with_hash
				PARAMS ((htab_t, rtx,
					 enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
					 int, int, rtx));
static void fixup_var_refs_1	PARAMS ((rtx, enum machine_mode, rtx *, rtx,
					 struct fixup_replacement **, rtx));
static rtx fixup_memory_subreg	PARAMS ((rtx, rtx, enum machine_mode, int));
static rtx walk_fixup_memory_subreg  PARAMS ((rtx, rtx, enum machine_mode,
					      int));
static rtx fixup_stack_1	PARAMS ((rtx, rtx));
static void optimize_bit_field	PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls	PARAMS ((tree, int));
static void instantiate_decls_1	PARAMS ((tree, int));
static void instantiate_decl	PARAMS ((rtx, HOST_WIDE_INT, int));
static rtx instantiate_new_reg	PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers	PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
					  struct args_size *));
static void pad_below		PARAMS ((struct args_size *, enum machine_mode,
					 tree));
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1	PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_0	PARAMS ((tree));
static void reorder_blocks_1	PARAMS ((rtx, tree, varray_type *));
static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse	PARAMS ((tree));
static int all_blocks		PARAMS ((tree, tree *));
static tree *get_block_vector   PARAMS ((tree, int *));
extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns	PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains		PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
					  htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof		PARAMS ((rtx *, void *));
static hashval_t insns_for_mem_hash PARAMS ((const void *));
static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk   PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
static void instantiate_virtual_regs_lossage PARAMS ((rtx));

/* Pointer to chain of `struct function' for containing functions.  */
static GTY(()) struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  if (context)
    {
      if (context == current_function_decl)
	cfun->contains_functions = 1;
      else
	{
	  struct function *containing = find_function_data (context);
	  containing->contains_functions = 1;
	}
    }

  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  (*lang_hooks.function.enter_nested) (p);

  cfun = 0;
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

354
355/* Restore the last saved context, at the end of a nested function.
356   This function is called from language-specific code.  */
357
358void
359pop_function_context_from (context)
360     tree context ATTRIBUTE_UNUSED;
361{
362  struct function *p = outer_function_chain;
363  struct var_refs_queue *queue;
364
365  cfun = p;
366  outer_function_chain = p->outer;
367
368  current_function_decl = p->decl;
369  reg_renumber = 0;
370
371  restore_emit_status (p);
372
373  (*lang_hooks.function.leave_nested) (p);
374
375  /* Finish doing put_var_into_stack for any of our variables which became
376     addressable during the nested function.  If only one entry has to be
377     fixed up, just do that one.  Otherwise, first make a list of MEMs that
378     are not to be unshared.  */
379  if (p->fixup_var_refs_queue == 0)
380    ;
381  else if (p->fixup_var_refs_queue->next == 0)
382    fixup_var_refs (p->fixup_var_refs_queue->modified,
383		    p->fixup_var_refs_queue->promoted_mode,
384		    p->fixup_var_refs_queue->unsignedp,
385		    p->fixup_var_refs_queue->modified, 0);
386  else
387    {
388      rtx list = 0;
389
390      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
391	list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);
392
393      for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
394	fixup_var_refs (queue->modified, queue->promoted_mode,
395			queue->unsignedp, list, 0);
396
397    }
398
399  p->fixup_var_refs_queue = 0;
400
401  /* Reset variables that have known state during rtx generation.  */
402  rtx_equal_function_value_matters = 1;
403  virtuals_instantiated = 0;
404  generating_concat_p = 1;
405}
406
407void
408pop_function_context ()
409{
410  pop_function_context_from (current_function_decl);
411}
412
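/* A hedged sketch of how a front end pairs the context routines above
   around a nested function definition (details vary by language):

	push_function_context ();
	... set up a fresh cfun and compile the nested function ...
	pop_function_context ();  */
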
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (f)
     struct function *f;
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  (*lang_hooks.function.final) (f);
  f->stmt = NULL;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (f)
     struct function *f;
{
  f->eh = NULL;
  f->expr = NULL;
  f->emit = NULL;
  f->varasm = NULL;
  f->machine = NULL;

  f->x_temp_slots = NULL;
  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->x_nonlocal_labels = NULL;
  f->x_nonlocal_goto_handler_slots = NULL;
  f->x_nonlocal_goto_handler_labels = NULL;
  f->x_nonlocal_goto_stack_level = NULL;
  f->x_cleanup_label = NULL;
  f->x_return_label = NULL;
  f->computed_goto_common_label = NULL;
  f->computed_goto_common_reg = NULL;
  f->x_save_expr_regs = NULL;
  f->x_stack_slot_list = NULL;
  f->x_rtl_expr_chain = NULL;
  f->x_tail_recursion_label = NULL;
  f->x_tail_recursion_reentry = NULL;
  f->x_arg_pointer_save_area = NULL;
  f->x_clobber_return_insn = NULL;
  f->x_context_display = NULL;
  f->x_trampoline_list = NULL;
  f->x_parm_birth_insn = NULL;
  f->x_last_parm_insn = NULL;
  f->x_parm_reg_stack_loc = NULL;
  f->fixup_var_refs_queue = NULL;
  f->original_arg_vector = NULL;
  f->original_decl_initial = NULL;
  f->inl_last_parm_insn = NULL;
  f->epilogue_delay_list = NULL;
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated in
   function F.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_func_frame_size (f)
     struct function *f;
{
#ifdef FRAME_GROWS_DOWNWARD
  return -f->x_frame_offset;
#else
  return f->x_frame_offset;
#endif
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */
HOST_WIDE_INT
get_frame_size ()
{
  return get_func_frame_size (cfun);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.

   FUNCTION specifies the function to allocate in.  */

static rtx
assign_stack_local_1 (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  rtx x, addr;
  int bigend_correction = 0;
  int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;
      else
	alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = (*lang_hooks.types.type_for_mode) (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset -= size;
#endif

  /* Ignore alignment requests we cannot honor given the preferred
     stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#else
  function->x_frame_offset = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (function == cfun && virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  function->x_frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  function->x_frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->x_stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

  return x;
}

/* Wrapper around assign_stack_local_1;  assign a local stack slot for the
   current function.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  return assign_stack_local_1 (mode, size, align, cfun);
}

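/* Illustrative calls, assuming typical target parameters (examples only,
   not compiler code):

	assign_stack_local (SImode, 4, 0);	ALIGN 0: align per SImode
	assign_stack_local (BLKmode, len, -1);	ALIGN -1: BIGGEST_ALIGNMENT,
						size rounded up to match
	assign_stack_local (BLKmode, len, 64);	ALIGN > 0: 64-bit boundary  */
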
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& objects_must_conflict_p (p->type, type)
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->type = best_p->type;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }


  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
				 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}

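/* Hedged usage examples for the KEEP argument (illustrative only, per the
   handling in assign_stack_temp_for_type above):

	assign_stack_temp (SImode, 4, 0);   freed by the next free_temp_slots
	assign_stack_temp (SImode, 4, 1);   survives free_temp_slots
	assign_stack_temp (SImode, 4, 2);   lives at target_temp_slot_level
	assign_stack_temp (SImode, 4, 3);   lives at var_temp_slot_level  */
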
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error_with_decl (decl, "size of variable `%s' is too large");
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}

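/* For instance (illustrative, not compiler code): assign_temp (type, 1, 1, 0)
   always yields addressable stack memory for TYPE, while with
   MEMORY_REQUIRED == 0 a scalar type may instead get a (possibly promoted)
   pseudo register from gen_reg_rtx.  */
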
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     expensive optimizations are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}

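/* A worked example of the merge test above: if free BLKmode slot P has
   base_offset 0 and full_size 16, and free BLKmode slot Q has base_offset 16,
   then P->base_offset + P->full_size == Q->base_offset, so Q is folded into
   P, which then spans 16 + Q->full_size bytes.  */
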
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     was taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
	   needs to be preserved.  This can happen if a temporary in
	   the RTL_EXPR was addressed; preserve_temp_slots will move
	   the temporary into a higher level.  */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack
   slot.  This is done when an address-reference to the variable is
   seen.  If RESCAN is true, all previously emitted instructions are
   examined and modified to handle the fact that DECL is now
   addressable.  */

void
put_var_into_stack (decl, rescan)
     tree decl;
     int rescan;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;
  int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int usedp = (TREE_USED (decl)
	       || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
	 ? SAVE_EXPR_RTL (decl)
	 : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl, rescan);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
			    decl_mode, volatilep, 0, usedp, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.
	 We fixup references to the parts only after we fixup references
	 to the whole CONCAT, lest we do double fixups for the latter
	 references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  part_mode, volatilep, 0, 0, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
	SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
	SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (usedp && rescan)
	{
	  schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
				   promoted_mode, 0);
	  schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
	  schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
	}
    }
  else
    return;
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     unsigned int original_regno;
     int used_p;
     htab_t ht;
{
  struct function *func = function ? function : cfun;
  rtx new = 0;
  unsigned int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (regno < func->x_max_parm_reg)
    new = func->x_parm_reg_stack_loc[regno];

  if (new == 0)
    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);

  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  MEM_ATTRS (reg) = 0;
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  if (type)
    {
      MEM_SET_IN_STRUCT_P (reg,
			   AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
      set_mem_alias_set (reg, get_alias_set (type));
    }

  if (used_p)
    schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}

/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode;
     htab_t ht;
{
  int unsigned_p = type ? TREE_UNSIGNED (type) : 0;

  if (function != 0)
    {
      struct var_refs_queue *temp;

      temp
	= (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = unsigned_p;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
}

static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx may_share;
     htab_t ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      if (stack != 0)
	abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
				      may_share);
      return;
    }

  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
			stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
			    stack->next != 0, may_share);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
				may_share);
	  end_sequence ();
	}
    }
}

/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */
1611
1612static struct fixup_replacement *
1613find_fixup_replacement (replacements, x)
1614     struct fixup_replacement **replacements;
1615     rtx x;
1616{
1617  struct fixup_replacement *p;
1618
1619  /* See if we have already replaced this.  */
1620  for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1621    ;
1622
1623  if (p == 0)
1624    {
1625      p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1626      p->old = x;
1627      p->new = 0;
1628      p->next = *replacements;
1629      *replacements = p;
1630    }
1631
1632  return p;
1633}
1634
1635/* Scan the insn-chain starting with INSN for refs to VAR and fix them
1636   up.  TOPLEVEL is nonzero if this chain is the main chain of insns
1637   for the current function.  MAY_SHARE is either a MEM that is not
1638   to be unshared or a list of them.  */
1639
1640static void
1641fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
1642     rtx insn;
1643     rtx var;
1644     enum machine_mode promoted_mode;
1645     int unsignedp;
1646     int toplevel;
1647     rtx may_share;
1648{
1649  while (insn)
1650    {
1651      /* fixup_var_refs_insn might modify insn, so save its next
1652         pointer now.  */
1653      rtx next = NEXT_INSN (insn);
1654
1655      /* CALL_PLACEHOLDERs are special; we have to switch into each of
1656	 the three sequences they (potentially) contain, and process
1657	 them recursively.  The CALL_INSN itself is not interesting.  */
1658
1659      if (GET_CODE (insn) == CALL_INSN
1660	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1661	{
1662	  int i;
1663
1664	  /* Look at the normal call, sibling call and tail recursion
1665	     sequences attached to the CALL_PLACEHOLDER.  */
1666	  for (i = 0; i < 3; i++)
1667	    {
1668	      rtx seq = XEXP (PATTERN (insn), i);
1669	      if (seq)
1670		{
1671		  push_to_sequence (seq);
1672		  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
1673					may_share);
1674		  XEXP (PATTERN (insn), i) = get_insns ();
1675		  end_sequence ();
1676		}
1677	    }
1678	}
1679
1680      else if (INSN_P (insn))
1681	fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
1682			     may_share);
1683
1684      insn = next;
1685    }
1686}
1687
1688/* Look up the insns which reference VAR in HT and fix them up.  Other
1689   arguments are the same as fixup_var_refs_insns.
1690
1691   N.B. No need for special processing of CALL_PLACEHOLDERs here,
1692   because the hash table will point straight to the interesting insn
1693   (inside the CALL_PLACEHOLDER).  */
1694
1695static void
1696fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
1697     htab_t ht;
1698     rtx var;
1699     enum machine_mode promoted_mode;
1700     int unsignedp;
1701     rtx may_share;
1702{
1703  struct insns_for_mem_entry tmp;
1704  struct insns_for_mem_entry *ime;
1705  rtx insn_list;
1706
1707  tmp.key = var;
1708  ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
1709  for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
1710    if (INSN_P (XEXP (insn_list, 0)))
1711      fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
1712			   unsignedp, 1, may_share);
1713}
1714
1715
1716/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
1717   the insn under examination, VAR is the variable to fix up
1718   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1719   TOPLEVEL is nonzero if this is the main insn chain for this
1720   function.  NO_SHARE is a MEM, or a list of MEMs, not to be unshared.  */
1721
1722static void
1723fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
1724     rtx insn;
1725     rtx var;
1726     enum machine_mode promoted_mode;
1727     int unsignedp;
1728     int toplevel;
1729     rtx no_share;
1730{
1731  rtx call_dest = 0;
1732  rtx set, prev, prev_set;
1733  rtx note;
1734
1735  /* Remember the notes in case we delete the insn.  */
1736  note = REG_NOTES (insn);
1737
1738  /* If this is a CLOBBER of VAR, delete it.
1739
1740     If it has a REG_LIBCALL note, delete the REG_LIBCALL
1741     and REG_RETVAL notes too.  */
1742  if (GET_CODE (PATTERN (insn)) == CLOBBER
1743      && (XEXP (PATTERN (insn), 0) == var
1744	  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1745	      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1746		  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1747    {
1748      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1749	/* The REG_LIBCALL note will go away since we are going to
1750	   turn INSN into a NOTE, so just delete the
1751	   corresponding REG_RETVAL note.  */
1752	remove_note (XEXP (note, 0),
1753		     find_reg_note (XEXP (note, 0), REG_RETVAL,
1754				    NULL_RTX));
1755
1756      delete_insn (insn);
1757    }
1758
1759  /* The insn to load VAR from a home in the arglist
1760     is now a no-op.  When we see it, just delete it.
1761     Similarly if this is storing VAR from a register from which
1762     it was loaded in the previous insn.  This will occur
1763     when an ADDRESSOF was made for an arglist slot.  */
1764  else if (toplevel
1765	   && (set = single_set (insn)) != 0
1766	   && SET_DEST (set) == var
1767	   /* If this represents the result of an insn group,
1768	      don't delete the insn.  */
1769	   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1770	   && (rtx_equal_p (SET_SRC (set), var)
1771	       || (GET_CODE (SET_SRC (set)) == REG
1772		   && (prev = prev_nonnote_insn (insn)) != 0
1773		   && (prev_set = single_set (prev)) != 0
1774		   && SET_DEST (prev_set) == SET_SRC (set)
1775		   && rtx_equal_p (SET_SRC (prev_set), var))))
1776    {
1777      delete_insn (insn);
1778    }
1779  else
1780    {
1781      struct fixup_replacement *replacements = 0;
1782      rtx next_insn = NEXT_INSN (insn);
1783
1784      if (SMALL_REGISTER_CLASSES)
1785	{
1786	  /* If the insn that copies the results of a CALL_INSN
1787	     into a pseudo now references VAR, we have to use an
1788	     intermediate pseudo since we want the life of the
1789	     return value register to be only a single insn.
1790
1791	     If we don't use an intermediate pseudo, such things as
1792	     address computations to make the address of VAR valid (when
1793	     it is not) can be placed between the CALL_INSN and INSN.
1794
1795	     To make sure this doesn't happen, we record the destination
1796	     of the CALL_INSN and see if the next insn uses both that
1797	     and VAR.  */
1798
1799	  if (call_dest != 0 && GET_CODE (insn) == INSN
1800	      && reg_mentioned_p (var, PATTERN (insn))
1801	      && reg_mentioned_p (call_dest, PATTERN (insn)))
1802	    {
1803	      rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1804
1805	      emit_insn_before (gen_move_insn (temp, call_dest), insn);
1806
1807	      PATTERN (insn) = replace_rtx (PATTERN (insn),
1808					    call_dest, temp);
1809	    }
1810
1811	  if (GET_CODE (insn) == CALL_INSN
1812	      && GET_CODE (PATTERN (insn)) == SET)
1813	    call_dest = SET_DEST (PATTERN (insn));
1814	  else if (GET_CODE (insn) == CALL_INSN
1815		   && GET_CODE (PATTERN (insn)) == PARALLEL
1816		   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1817	    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1818	  else
1819	    call_dest = 0;
1820	}
1821
1822      /* See if we have to do anything to INSN now that VAR is in
1823	 memory.  If it needs to be loaded into a pseudo, use a single
1824	 pseudo for the entire insn in case there is a MATCH_DUP
1825	 between two operands.  We pass a pointer to the head of
1826	 a list of struct fixup_replacements.  If fixup_var_refs_1
1827	 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1828	 it will record them in this list.
1829
1830	 If it allocated a pseudo for any replacement, we copy into
1831	 it here.  */
1832
1833      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1834			&replacements, no_share);
1835
1836      /* If this is last_parm_insn, and any instructions were output
1837	 after it to fix it up, then we must set last_parm_insn to
1838	 the last such instruction emitted.  */
1839      if (insn == last_parm_insn)
1840	last_parm_insn = PREV_INSN (next_insn);
1841
1842      while (replacements)
1843	{
1844	  struct fixup_replacement *next;
1845
1846	  if (GET_CODE (replacements->new) == REG)
1847	    {
1848	      rtx insert_before;
1849	      rtx seq;
1850
1851	      /* OLD might be a (subreg (mem)).  */
1852	      if (GET_CODE (replacements->old) == SUBREG)
1853		replacements->old
1854		  = fixup_memory_subreg (replacements->old, insn,
1855					 promoted_mode, 0);
1856	      else
1857		replacements->old
1858		  = fixup_stack_1 (replacements->old, insn);
1859
1860	      insert_before = insn;
1861
1862	      /* If we are changing the mode, do a conversion.
1863		 This might be wasteful, but combine.c will
1864		 eliminate much of the waste.  */
1865
1866	      if (GET_MODE (replacements->new)
1867		  != GET_MODE (replacements->old))
1868		{
1869		  start_sequence ();
1870		  convert_move (replacements->new,
1871				replacements->old, unsignedp);
1872		  seq = get_insns ();
1873		  end_sequence ();
1874		}
1875	      else
1876		seq = gen_move_insn (replacements->new,
1877				     replacements->old);
1878
1879	      emit_insn_before (seq, insert_before);
1880	    }
1881
1882	  next = replacements->next;
1883	  free (replacements);
1884	  replacements = next;
1885	}
1886    }
1887
1888  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1889     But don't touch other insns referred to by reg-notes;
1890     we will get them elsewhere.  */
1891  while (note)
1892    {
1893      if (GET_CODE (note) != INSN_LIST)
1894	XEXP (note, 0)
1895	  = walk_fixup_memory_subreg (XEXP (note, 0), insn,
1896				      promoted_mode, 1);
1897      note = XEXP (note, 1);
1898    }
1899}
1900
1901/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1902   See if the rtx expression at *LOC in INSN needs to be changed.
1903
1904   REPLACEMENTS is a pointer to a list head that starts out zero, but may
1905   contain a list of original rtx's and replacements. If we find that we need
1906   to modify this insn by replacing a memory reference with a pseudo or by
1907   making a new MEM to implement a SUBREG, we consult that list to see if
1908   we have already chosen a replacement. If none has already been allocated,
1909   we allocate it and update the list.  fixup_var_refs_insn will copy VAR
1910   or the SUBREG, as appropriate, to the pseudo.  */
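
/* For example, if VAR was promoted to DImode and an insn contains
   (subreg:SI (reg/v:DI N) 0), then once the register is given a stack
   home we are looking at (subreg:SI (mem:DI ...) 0); that must be
   rewritten either as a narrower MEM at an adjusted address or as a
   reference to a fresh pseudo that is copied to or from the MEM.  */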
1911
1912static void
1913fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
1914     rtx var;
1915     enum machine_mode promoted_mode;
1916     rtx *loc;
1917     rtx insn;
1918     struct fixup_replacement **replacements;
1919     rtx no_share;
1920{
1921  int i;
1922  rtx x = *loc;
1923  RTX_CODE code = GET_CODE (x);
1924  const char *fmt;
1925  rtx tem, tem1;
1926  struct fixup_replacement *replacement;
1927
1928  switch (code)
1929    {
1930    case ADDRESSOF:
1931      if (XEXP (x, 0) == var)
1932	{
1933	  /* Prevent sharing of rtl that might lose.  */
1934	  rtx sub = copy_rtx (XEXP (var, 0));
1935
1936	  if (! validate_change (insn, loc, sub, 0))
1937	    {
1938	      rtx y = gen_reg_rtx (GET_MODE (sub));
1939	      rtx seq, new_insn;
1940
1941	      /* We should be able to replace with a register or all is lost.
1942		 Note that we can't use validate_change to verify this, since
1943		 it does not take care of replacing all dups simultaneously.  */
1944	      if (! validate_replace_rtx (*loc, y, insn))
1945		abort ();
1946
1947	      /* Careful!  First try to recognize a direct move of the
1948		 value, mimicking how things are done in gen_reload wrt
1949		 PLUS.  Consider what happens when insn is a conditional
1950		 move instruction and addsi3 clobbers flags.  */
1951
1952	      start_sequence ();
1953	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1954	      seq = get_insns ();
1955	      end_sequence ();
1956
1957	      if (recog_memoized (new_insn) < 0)
1958		{
1959		  /* That failed.  Fall back on force_operand and hope.  */
1960
1961		  start_sequence ();
1962		  sub = force_operand (sub, y);
1963		  if (sub != y)
1964		    emit_insn (gen_move_insn (y, sub));
1965		  seq = get_insns ();
1966		  end_sequence ();
1967		}
1968
1969#ifdef HAVE_cc0
1970	      /* Don't separate setter from user.  */
1971	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1972		insn = PREV_INSN (insn);
1973#endif
1974
1975	      emit_insn_before (seq, insn);
1976	    }
1977	}
1978      return;
1979
1980    case MEM:
1981      if (var == x)
1982	{
1983	  /* If we already have a replacement, use it.  Otherwise,
1984	     try to fix up this address in case it is invalid.  */
1985
1986	  replacement = find_fixup_replacement (replacements, var);
1987	  if (replacement->new)
1988	    {
1989	      *loc = replacement->new;
1990	      return;
1991	    }
1992
1993	  *loc = replacement->new = x = fixup_stack_1 (x, insn);
1994
1995	  /* Unless we are forcing memory to register or we changed the mode,
1996	     we can leave things the way they are if the insn is valid.  */
1997
1998	  INSN_CODE (insn) = -1;
1999	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
2000	      && recog_memoized (insn) >= 0)
2001	    return;
2002
2003	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
2004	  return;
2005	}
2006
2007      /* If X contains VAR, we need to unshare it here so that we update
2008	 each occurrence separately.  But all identical MEMs in one insn
2009	 must be replaced with the same rtx because of the possibility of
2010	 MATCH_DUPs.  */
2011
2012      if (reg_mentioned_p (var, x))
2013	{
2014	  replacement = find_fixup_replacement (replacements, x);
2015	  if (replacement->new == 0)
2016	    replacement->new = copy_most_rtx (x, no_share);
2017
2018	  *loc = x = replacement->new;
2019	  code = GET_CODE (x);
2020	}
2021      break;
2022
2023    case REG:
2024    case CC0:
2025    case PC:
2026    case CONST_INT:
2027    case CONST:
2028    case SYMBOL_REF:
2029    case LABEL_REF:
2030    case CONST_DOUBLE:
2031    case CONST_VECTOR:
2032      return;
2033
2034    case SIGN_EXTRACT:
2035    case ZERO_EXTRACT:
2036      /* Note that in some cases those types of expressions are altered
2037	 by optimize_bit_field, and do not survive to get here.  */
2038      if (XEXP (x, 0) == var
2039	  || (GET_CODE (XEXP (x, 0)) == SUBREG
2040	      && SUBREG_REG (XEXP (x, 0)) == var))
2041	{
2042	  /* Get TEM as a valid MEM in the mode presently in the insn.
2043
2044	     We don't worry about the possibility of MATCH_DUP here; it
2045	     is highly unlikely and would be tricky to handle.  */
2046
2047	  tem = XEXP (x, 0);
2048	  if (GET_CODE (tem) == SUBREG)
2049	    {
2050	      if (GET_MODE_BITSIZE (GET_MODE (tem))
2051		  > GET_MODE_BITSIZE (GET_MODE (var)))
2052		{
2053		  replacement = find_fixup_replacement (replacements, var);
2054		  if (replacement->new == 0)
2055		    replacement->new = gen_reg_rtx (GET_MODE (var));
2056		  SUBREG_REG (tem) = replacement->new;
2057
2058		  /* The following code works only if we have a MEM, so we
2059		     need to handle the subreg here.  We directly substitute
2060		     it assuming that a subreg must be OK here.  We already
2061		     scheduled a replacement to copy the mem into the
2062		     subreg.  */
2063		  XEXP (x, 0) = tem;
2064		  return;
2065		}
2066	      else
2067		tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2068	    }
2069	  else
2070	    tem = fixup_stack_1 (tem, insn);
2071
2072	  /* Unless we want to load from memory, get TEM into the proper mode
2073	     for an extract from memory.  This can only be done if the
2074	     extract is at a constant position and length.  */
2075
2076	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2077	      && GET_CODE (XEXP (x, 2)) == CONST_INT
2078	      && ! mode_dependent_address_p (XEXP (tem, 0))
2079	      && ! MEM_VOLATILE_P (tem))
2080	    {
2081	      enum machine_mode wanted_mode = VOIDmode;
2082	      enum machine_mode is_mode = GET_MODE (tem);
2083	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2084
2085	      if (GET_CODE (x) == ZERO_EXTRACT)
2086		{
2087		  enum machine_mode new_mode
2088		    = mode_for_extraction (EP_extzv, 1);
2089		  if (new_mode != MAX_MACHINE_MODE)
2090		    wanted_mode = new_mode;
2091		}
2092	      else if (GET_CODE (x) == SIGN_EXTRACT)
2093		{
2094		  enum machine_mode new_mode
2095		    = mode_for_extraction (EP_extv, 1);
2096		  if (new_mode != MAX_MACHINE_MODE)
2097		    wanted_mode = new_mode;
2098		}
2099
2100	      /* If we have a narrower mode, we can do something.  */
2101	      if (wanted_mode != VOIDmode
2102		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2103		{
2104		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2105		  rtx old_pos = XEXP (x, 2);
2106		  rtx newmem;
2107
2108		  /* If the bytes and bits are counted differently, we
2109		     must adjust the offset.  */
2110		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2111		    offset = (GET_MODE_SIZE (is_mode)
2112			      - GET_MODE_SIZE (wanted_mode) - offset);
2113
2114		  pos %= GET_MODE_BITSIZE (wanted_mode);
2115
2116		  newmem = adjust_address_nv (tem, wanted_mode, offset);
2117
2118		  /* Make the change and see if the insn remains valid.  */
2119		  INSN_CODE (insn) = -1;
2120		  XEXP (x, 0) = newmem;
2121		  XEXP (x, 2) = GEN_INT (pos);
2122
2123		  if (recog_memoized (insn) >= 0)
2124		    return;
2125
2126		  /* Otherwise, restore old position.  XEXP (x, 0) will be
2127		     restored later.  */
2128		  XEXP (x, 2) = old_pos;
2129		}
2130	    }
2131
2132	  /* If we get here, the bitfield extract insn can't accept a memory
2133	     reference.  Copy the input into a register.  */
2134
2135	  tem1 = gen_reg_rtx (GET_MODE (tem));
2136	  emit_insn_before (gen_move_insn (tem1, tem), insn);
2137	  XEXP (x, 0) = tem1;
2138	  return;
2139	}
2140      break;
2141
2142    case SUBREG:
2143      if (SUBREG_REG (x) == var)
2144	{
2145	  /* If this is a special SUBREG made because VAR was promoted
2146	     from a wider mode, replace it with VAR and call ourself
2147	     recursively, this time saying that the object previously
2148	     had its current mode (by virtue of the SUBREG).  */
2149
2150	  if (SUBREG_PROMOTED_VAR_P (x))
2151	    {
2152	      *loc = var;
2153	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
2154				no_share);
2155	      return;
2156	    }
2157
2158	  /* If this SUBREG makes VAR wider, it has become a paradoxical
2159	     SUBREG with VAR in memory, but these aren't allowed at this
2160	     stage of the compilation.  So load VAR into a pseudo and take
2161	     a SUBREG of that pseudo.  */
2162	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2163	    {
2164	      replacement = find_fixup_replacement (replacements, var);
2165	      if (replacement->new == 0)
2166		replacement->new = gen_reg_rtx (promoted_mode);
2167	      SUBREG_REG (x) = replacement->new;
2168	      return;
2169	    }
2170
2171	  /* See if we have already found a replacement for this SUBREG.
2172	     If so, use it.  Otherwise, make a MEM and see if the insn
2173	     is recognized.  If not, or if we should force MEM into a register,
2174	     make a pseudo for this SUBREG.  */
2175	  replacement = find_fixup_replacement (replacements, x);
2176	  if (replacement->new)
2177	    {
2178	      *loc = replacement->new;
2179	      return;
2180	    }
2181
2182	  replacement->new = *loc = fixup_memory_subreg (x, insn,
2183							 promoted_mode, 0);
2184
2185	  INSN_CODE (insn) = -1;
2186	  if (! flag_force_mem && recog_memoized (insn) >= 0)
2187	    return;
2188
2189	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2190	  return;
2191	}
2192      break;
2193
2194    case SET:
2195      /* First do special simplification of bit-field references.  */
2196      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2197	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2198	optimize_bit_field (x, insn, 0);
2199      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2200	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2201	optimize_bit_field (x, insn, 0);
2202
2203      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2204	 into a register and then store it back out.  */
2205      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2206	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2207	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2208	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2209	      > GET_MODE_SIZE (GET_MODE (var))))
2210	{
2211	  replacement = find_fixup_replacement (replacements, var);
2212	  if (replacement->new == 0)
2213	    replacement->new = gen_reg_rtx (GET_MODE (var));
2214
2215	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2216	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
2217	}
2218
2219      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2220	 insn into a pseudo and store the low part of the pseudo into VAR.  */
2221      if (GET_CODE (SET_DEST (x)) == SUBREG
2222	  && SUBREG_REG (SET_DEST (x)) == var
2223	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2224	      > GET_MODE_SIZE (GET_MODE (var))))
2225	{
2226	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2227	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2228							    tem)),
2229			   insn);
2230	  break;
2231	}
2232
2233      {
2234	rtx dest = SET_DEST (x);
2235	rtx src = SET_SRC (x);
2236	rtx outerdest = dest;
2237
2238	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2239	       || GET_CODE (dest) == SIGN_EXTRACT
2240	       || GET_CODE (dest) == ZERO_EXTRACT)
2241	  dest = XEXP (dest, 0);
2242
2243	if (GET_CODE (src) == SUBREG)
2244	  src = SUBREG_REG (src);
2245
2246	/* If VAR does not appear at the top level of the SET
2247	   just scan the lower levels of the tree.  */
2248
2249	if (src != var && dest != var)
2250	  break;
2251
2252	/* We will need to rerecognize this insn.  */
2253	INSN_CODE (insn) = -1;
2254
2255	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
2256	    && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
2257	  {
2258	    /* Since this case will return, ensure we fixup all the
2259	       operands here.  */
2260	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2261			      insn, replacements, no_share);
2262	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2263			      insn, replacements, no_share);
2264	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2265			      insn, replacements, no_share);
2266
2267	    tem = XEXP (outerdest, 0);
2268
2269	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2270	       that may appear inside a ZERO_EXTRACT.
2271	       This was legitimate when the MEM was a REG.  */
2272	    if (GET_CODE (tem) == SUBREG
2273		&& SUBREG_REG (tem) == var)
2274	      tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
2275	    else
2276	      tem = fixup_stack_1 (tem, insn);
2277
2278	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2279		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2280		&& ! mode_dependent_address_p (XEXP (tem, 0))
2281		&& ! MEM_VOLATILE_P (tem))
2282	      {
2283		enum machine_mode wanted_mode;
2284		enum machine_mode is_mode = GET_MODE (tem);
2285		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2286
2287		wanted_mode = mode_for_extraction (EP_insv, 0);
2288
2289		/* If we have a narrower mode, we can do something.  */
2290		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2291		  {
2292		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2293		    rtx old_pos = XEXP (outerdest, 2);
2294		    rtx newmem;
2295
2296		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2297		      offset = (GET_MODE_SIZE (is_mode)
2298				- GET_MODE_SIZE (wanted_mode) - offset);
2299
2300		    pos %= GET_MODE_BITSIZE (wanted_mode);
2301
2302		    newmem = adjust_address_nv (tem, wanted_mode, offset);
2303
2304		    /* Make the change and see if the insn remains valid.  */
2305		    INSN_CODE (insn) = -1;
2306		    XEXP (outerdest, 0) = newmem;
2307		    XEXP (outerdest, 2) = GEN_INT (pos);
2308
2309		    if (recog_memoized (insn) >= 0)
2310		      return;
2311
2312		    /* Otherwise, restore old position.  XEXP (x, 0) will be
2313		       restored later.  */
2314		    XEXP (outerdest, 2) = old_pos;
2315		  }
2316	      }
2317
2318	    /* If we get here, the bit-field store doesn't allow memory
2319	       or isn't located at a constant position.  Load the value into
2320	       a register, do the store, and put it back into memory.  */
2321
2322	    tem1 = gen_reg_rtx (GET_MODE (tem));
2323	    emit_insn_before (gen_move_insn (tem1, tem), insn);
2324	    emit_insn_after (gen_move_insn (tem, tem1), insn);
2325	    XEXP (outerdest, 0) = tem1;
2326	    return;
2327	  }
2328
2329	/* STRICT_LOW_PART is a no-op on memory references
2330	   and it can cause combinations to be unrecognizable,
2331	   so eliminate it.  */
2332
2333	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2334	  SET_DEST (x) = XEXP (SET_DEST (x), 0);
2335
2336	/* A valid insn to copy VAR into or out of a register
2337	   must be left alone, to avoid an infinite loop here.
2338	   If the reference to VAR is by a subreg, fix that up,
2339	   since SUBREG is not valid for a memref.
2340	   Also fix up the address of the stack slot.
2341
2342	   Note that we must not try to recognize the insn until
2343	   after we know that we have valid addresses and no
2344	   (subreg (mem ...) ...) constructs, since these interfere
2345	   with determining the validity of the insn.  */
2346
2347	if ((SET_SRC (x) == var
2348	     || (GET_CODE (SET_SRC (x)) == SUBREG
2349		 && SUBREG_REG (SET_SRC (x)) == var))
2350	    && (GET_CODE (SET_DEST (x)) == REG
2351		|| (GET_CODE (SET_DEST (x)) == SUBREG
2352		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2353	    && GET_MODE (var) == promoted_mode
2354	    && x == single_set (insn))
2355	  {
2356	    rtx pat, last;
2357
2358	    if (GET_CODE (SET_SRC (x)) == SUBREG
2359		&& (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
2360		    > GET_MODE_SIZE (GET_MODE (var))))
2361	      {
2362		/* This (subreg VAR) is now a paradoxical subreg.  We need
2363		   to replace VAR instead of the subreg.  */
2364		replacement = find_fixup_replacement (replacements, var);
2365		if (replacement->new == NULL_RTX)
2366		  replacement->new = gen_reg_rtx (GET_MODE (var));
2367		SUBREG_REG (SET_SRC (x)) = replacement->new;
2368	      }
2369	    else
2370	      {
2371		replacement = find_fixup_replacement (replacements, SET_SRC (x));
2372		if (replacement->new)
2373		  SET_SRC (x) = replacement->new;
2374		else if (GET_CODE (SET_SRC (x)) == SUBREG)
2375		  SET_SRC (x) = replacement->new
2376		    = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
2377					   0);
2378		else
2379		  SET_SRC (x) = replacement->new
2380		    = fixup_stack_1 (SET_SRC (x), insn);
2381	      }
2382
2383	    if (recog_memoized (insn) >= 0)
2384	      return;
2385
2386	    /* INSN is not valid, but we know that we want to
2387	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
2388	       we generate the move and see whether it requires more
2389	       than one insn.  If it does, we emit those insns and
2390	       delete INSN.  Otherwise, we can just replace the pattern
2391	       of INSN; we have already verified above that INSN has
2392	       no other function than to do X.  */
2393
2394	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2395	    if (NEXT_INSN (pat) != NULL_RTX)
2396	      {
2397		last = emit_insn_before (pat, insn);
2398
2399		/* INSN might have REG_RETVAL or other important notes, so
2400		   we need to store the pattern of the last insn in the
2401		   sequence into INSN similarly to the normal case.  LAST
2402		   should not have REG_NOTES, but we allow them if INSN has
2403		   no REG_NOTES.  */
2404		if (REG_NOTES (last) && REG_NOTES (insn))
2405		  abort ();
2406		if (REG_NOTES (last))
2407		  REG_NOTES (insn) = REG_NOTES (last);
2408		PATTERN (insn) = PATTERN (last);
2409
2410		delete_insn (last);
2411	      }
2412	    else
2413	      PATTERN (insn) = PATTERN (pat);
2414
2415	    return;
2416	  }
2417
2418	if ((SET_DEST (x) == var
2419	     || (GET_CODE (SET_DEST (x)) == SUBREG
2420		 && SUBREG_REG (SET_DEST (x)) == var))
2421	    && (GET_CODE (SET_SRC (x)) == REG
2422		|| (GET_CODE (SET_SRC (x)) == SUBREG
2423		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2424	    && GET_MODE (var) == promoted_mode
2425	    && x == single_set (insn))
2426	  {
2427	    rtx pat, last;
2428
2429	    if (GET_CODE (SET_DEST (x)) == SUBREG)
2430	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
2431						  promoted_mode, 0);
2432	    else
2433	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2434
2435	    if (recog_memoized (insn) >= 0)
2436	      return;
2437
2438	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2439	    if (NEXT_INSN (pat) != NULL_RTX)
2440	      {
2441		last = emit_insn_before (pat, insn);
2442
2443		/* INSN might have REG_RETVAL or other important notes, so
2444		   we need to store the pattern of the last insn in the
2445		   sequence into INSN similarly to the normal case.  LAST
2446		   should not have REG_NOTES, but we allow them if INSN has
2447		   no REG_NOTES.  */
2448		if (REG_NOTES (last) && REG_NOTES (insn))
2449		  abort ();
2450		if (REG_NOTES (last))
2451		  REG_NOTES (insn) = REG_NOTES (last);
2452		PATTERN (insn) = PATTERN (last);
2453
2454		delete_insn (last);
2455	      }
2456	    else
2457	      PATTERN (insn) = PATTERN (pat);
2458
2459	    return;
2460	  }
2461
2462	/* Otherwise, storing into VAR must be handled specially
2463	   by storing into a temporary and copying that into VAR
2464	   with a new insn after this one.  Note that this case
2465	   will be used when storing into a promoted scalar since
2466	   the insn will now have different modes on the input
2467	   and output and hence will be invalid (except for the case
2468	   of setting it to a constant, which does not need any
2469	   change if it is valid).  We generate extra code in that case,
2470	   but combine.c will eliminate it.  */
2471
2472	if (dest == var)
2473	  {
2474	    rtx temp;
2475	    rtx fixeddest = SET_DEST (x);
2476	    enum machine_mode temp_mode;
2477
2478	    /* A STRICT_LOW_PART around a MEM can be discarded.  */
2479	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2480	      fixeddest = XEXP (fixeddest, 0);
2481	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
2482	    if (GET_CODE (fixeddest) == SUBREG)
2483	      {
2484		fixeddest = fixup_memory_subreg (fixeddest, insn,
2485						 promoted_mode, 0);
2486		temp_mode = GET_MODE (fixeddest);
2487	      }
2488	    else
2489	      {
2490		fixeddest = fixup_stack_1 (fixeddest, insn);
2491		temp_mode = promoted_mode;
2492	      }
2493
2494	    temp = gen_reg_rtx (temp_mode);
2495
2496	    emit_insn_after (gen_move_insn (fixeddest,
2497					    gen_lowpart (GET_MODE (fixeddest),
2498							 temp)),
2499			     insn);
2500
2501	    SET_DEST (x) = temp;
2502	  }
2503      }
2504
2505    default:
2506      break;
2507    }
2508
2509  /* Nothing special about this RTX; fix its operands.  */
2510
2511  fmt = GET_RTX_FORMAT (code);
2512  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2513    {
2514      if (fmt[i] == 'e')
2515	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
2516			  no_share);
2517      else if (fmt[i] == 'E')
2518	{
2519	  int j;
2520	  for (j = 0; j < XVECLEN (x, i); j++)
2521	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2522			      insn, replacements, no_share);
2523	}
2524    }
2525}
2526
2527/* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2528   The REG was placed on the stack, so X now has the form (SUBREG:m1
2529   (MEM:m2 ...)).
2530
2531   Return an rtx (MEM:m1 newaddr) which is equivalent.  If any insns
2532   must be emitted to compute NEWADDR, put them before INSN.
2533
2534   UNCRITICAL nonzero means accept paradoxical subregs.
2535   This is used for subregs found inside REG_NOTES.  */
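
/* As an illustration, on a little-endian machine
   (subreg:SI (mem:DI ADDR) 4) becomes (mem:SI (plus ADDR (const_int 4)));
   any insns needed to compute the new address are emitted before INSN.  */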
2536
2537static rtx
2538fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2539     rtx x;
2540     rtx insn;
2541     enum machine_mode promoted_mode;
2542     int uncritical;
2543{
2544  int offset;
2545  rtx mem = SUBREG_REG (x);
2546  rtx addr = XEXP (mem, 0);
2547  enum machine_mode mode = GET_MODE (x);
2548  rtx result, seq;
2549
2550  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
2551  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2552    abort ();
2553
2554  offset = SUBREG_BYTE (x);
2555  if (BYTES_BIG_ENDIAN)
2556    /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2557       the offset so that it points to the right location within the
2558       MEM.  */
2559    offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2560
2561  if (!flag_force_addr
2562      && memory_address_p (mode, plus_constant (addr, offset)))
2563    /* Shortcut if no insns need be emitted.  */
2564    return adjust_address (mem, mode, offset);
2565
2566  start_sequence ();
2567  result = adjust_address (mem, mode, offset);
2568  seq = get_insns ();
2569  end_sequence ();
2570
2571  emit_insn_before (seq, insn);
2572  return result;
2573}
2574
2575/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2576   Replace subexpressions of X in place.
2577   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2578   Otherwise return X, with its contents possibly altered.
2579
2580   INSN, PROMOTED_MODE and UNCRITICAL are as for
2581   fixup_memory_subreg.  */
2582
2583static rtx
2584walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2585     rtx x;
2586     rtx insn;
2587     enum machine_mode promoted_mode;
2588     int uncritical;
2589{
2590  enum rtx_code code;
2591  const char *fmt;
2592  int i;
2593
2594  if (x == 0)
2595    return 0;
2596
2597  code = GET_CODE (x);
2598
2599  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2600    return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2601
2602  /* Nothing special about this RTX; fix its operands.  */
2603
2604  fmt = GET_RTX_FORMAT (code);
2605  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2606    {
2607      if (fmt[i] == 'e')
2608	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2609						promoted_mode, uncritical);
2610      else if (fmt[i] == 'E')
2611	{
2612	  int j;
2613	  for (j = 0; j < XVECLEN (x, i); j++)
2614	    XVECEXP (x, i, j)
2615	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2616					  promoted_mode, uncritical);
2617	}
2618    }
2619  return x;
2620}
2621
2622/* For each memory ref within X, if it refers to a stack slot
2623   with an out of range displacement, put the address in a temp register
2624   (emitting new insns before INSN to load these registers)
2625   and alter the memory ref to use that register.
2626   Replace each such MEM rtx with a copy, to avoid clobberage.  */
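
/* For instance, if the constant displacement is out of range for the
   machine's addressing modes, (mem:SI (plus (reg fp) (const_int 40000)))
   is rewritten as (mem:SI (reg TMP)), where TMP is a new pseudo loaded
   with the sum by insns emitted before INSN.  */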
2627
2628static rtx
2629fixup_stack_1 (x, insn)
2630     rtx x;
2631     rtx insn;
2632{
2633  int i;
2634  RTX_CODE code = GET_CODE (x);
2635  const char *fmt;
2636
2637  if (code == MEM)
2638    {
2639      rtx ad = XEXP (x, 0);
2640      /* If we have address of a stack slot but it's not valid
2641	 (displacement is too large), compute the sum in a register.  */
2642      if (GET_CODE (ad) == PLUS
2643	  && GET_CODE (XEXP (ad, 0)) == REG
2644	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2645	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2646	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2647#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2648	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2649#endif
2650	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2651	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2652	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
2653	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2654	{
2655	  rtx temp, seq;
2656	  if (memory_address_p (GET_MODE (x), ad))
2657	    return x;
2658
2659	  start_sequence ();
2660	  temp = copy_to_reg (ad);
2661	  seq = get_insns ();
2662	  end_sequence ();
2663	  emit_insn_before (seq, insn);
2664	  return replace_equiv_address (x, temp);
2665	}
2666      return x;
2667    }
2668
2669  fmt = GET_RTX_FORMAT (code);
2670  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2671    {
2672      if (fmt[i] == 'e')
2673	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2674      else if (fmt[i] == 'E')
2675	{
2676	  int j;
2677	  for (j = 0; j < XVECLEN (x, i); j++)
2678	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2679	}
2680    }
2681  return x;
2682}
2683
2684/* Optimization: a bit-field instruction whose field
2685   happens to be a byte or halfword in memory
2686   can be changed to a move instruction.
2687
2688   We call here when INSN is an insn to examine or store into a bit-field.
2689   BODY is the SET-rtx to be altered.
2690
2691   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2692   (Currently this is called only from function.c, and EQUIV_MEM
2693   is always 0.)  */
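
/* For example, (zero_extract:SI (mem:SI ...) (const_int 16)
   (const_int 16)) names an aligned halfword, so the extraction can be
   replaced by a plain (mem:HI ...) at the appropriate byte offset.  */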
2694
2695static void
2696optimize_bit_field (body, insn, equiv_mem)
2697     rtx body;
2698     rtx insn;
2699     rtx *equiv_mem;
2700{
2701  rtx bitfield;
2702  int destflag;
2703  rtx seq = 0;
2704  enum machine_mode mode;
2705
2706  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2707      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2708    bitfield = SET_DEST (body), destflag = 1;
2709  else
2710    bitfield = SET_SRC (body), destflag = 0;
2711
2712  /* First check that the field being stored has constant size and position
2713     and is in fact a byte or halfword suitably aligned.  */
2714
2715  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2716      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2717      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2718	  != BLKmode)
2719      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2720    {
2721      rtx memref = 0;
2722
2723      /* Now check that the containing word is memory, not a register,
2724	 and that it is safe to change the machine mode.  */
2725
2726      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2727	memref = XEXP (bitfield, 0);
2728      else if (GET_CODE (XEXP (bitfield, 0)) == REG
2729	       && equiv_mem != 0)
2730	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2731      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2732	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2733	memref = SUBREG_REG (XEXP (bitfield, 0));
2734      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2735	       && equiv_mem != 0
2736	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2737	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2738
2739      if (memref
2740	  && ! mode_dependent_address_p (XEXP (memref, 0))
2741	  && ! MEM_VOLATILE_P (memref))
2742	{
2743	  /* Now adjust the address, first for any subreg'ing
2744	     that we are now getting rid of,
2745	     and then for which byte of the word is wanted.  */
2746
2747	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2748	  rtx insns;
2749
2750	  /* Adjust OFFSET to count bits from low-address byte.  */
2751	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2752	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2753		      - offset - INTVAL (XEXP (bitfield, 1)));
2754
2755	  /* Adjust OFFSET to count bytes from low-address byte.  */
2756	  offset /= BITS_PER_UNIT;
2757	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2758	    {
2759	      offset += (SUBREG_BYTE (XEXP (bitfield, 0))
2760			 / UNITS_PER_WORD) * UNITS_PER_WORD;
2761	      if (BYTES_BIG_ENDIAN)
2762		offset -= (MIN (UNITS_PER_WORD,
2763				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2764			   - MIN (UNITS_PER_WORD,
2765				  GET_MODE_SIZE (GET_MODE (memref))));
2766	    }
2767
2768	  start_sequence ();
2769	  memref = adjust_address (memref, mode, offset);
2770	  insns = get_insns ();
2771	  end_sequence ();
2772	  emit_insn_before (insns, insn);
2773
2774	  /* Store this memory reference where
2775	     we found the bit field reference.  */
2776
2777	  if (destflag)
2778	    {
2779	      validate_change (insn, &SET_DEST (body), memref, 1);
2780	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2781		{
2782		  rtx src = SET_SRC (body);
2783		  while (GET_CODE (src) == SUBREG
2784			 && SUBREG_BYTE (src) == 0)
2785		    src = SUBREG_REG (src);
2786		  if (GET_MODE (src) != GET_MODE (memref))
2787		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2788		  validate_change (insn, &SET_SRC (body), src, 1);
2789		}
2790	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
2791		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2792		/* This shouldn't happen because anything that didn't have
2793		   one of these modes should have got converted explicitly
2794		   one of these modes should have been converted explicitly
2795		   This is so because the original bit-field was
2796		   handled by agg_mode and so its tree structure had
2797		   the same mode that memref now has.  */
2798		abort ();
2799	    }
2800	  else
2801	    {
2802	      rtx dest = SET_DEST (body);
2803
2804	      while (GET_CODE (dest) == SUBREG
2805		     && SUBREG_BYTE (dest) == 0
2806		     && (GET_MODE_CLASS (GET_MODE (dest))
2807			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2808		     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2809			 <= UNITS_PER_WORD))
2810		dest = SUBREG_REG (dest);
2811
2812	      validate_change (insn, &SET_DEST (body), dest, 1);
2813
2814	      if (GET_MODE (dest) == GET_MODE (memref))
2815		validate_change (insn, &SET_SRC (body), memref, 1);
2816	      else
2817		{
2818		  /* Convert the mem ref to the destination mode.  */
2819		  rtx newreg = gen_reg_rtx (GET_MODE (dest));
2820
2821		  start_sequence ();
2822		  convert_move (newreg, memref,
2823				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2824		  seq = get_insns ();
2825		  end_sequence ();
2826
2827		  validate_change (insn, &SET_SRC (body), newreg, 1);
2828		}
2829	    }
2830
2831	  /* See if we can convert this extraction or insertion into
2832	     a simple move insn.  We might not be able to do so if this
2833	     was, for example, part of a PARALLEL.
2834
2835	     If we succeed, write out any needed conversions.  If we fail,
2836	     it is hard to guess why we failed, so don't do anything
2837	     special; just let the optimization be suppressed.  */
2838
2839	  if (apply_change_group () && seq)
2840	    emit_insn_before (seq, insn);
2841	}
2842    }
2843}
2844
2845/* These routines are responsible for converting virtual register references
2846   to the actual hard register references once RTL generation is complete.
2847
2848   The following five variables are used for communication between the
2849   routines.  They contain the offsets of the virtual registers from their
2850   respective hard registers.  */
2851
2852static int in_arg_offset;
2853static int var_offset;
2854static int dynamic_offset;
2855static int out_arg_offset;
2856static int cfa_offset;
2857
2858/* In most machines, the stack pointer register is equivalent to the bottom
2859   of the stack.  */
2860
2861#ifndef STACK_POINTER_OFFSET
2862#define STACK_POINTER_OFFSET	0
2863#endif
2864
2865/* If not defined, pick an appropriate default for the offset of dynamically
2866   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2867   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
2868
2869#ifndef STACK_DYNAMIC_OFFSET
2870
2871/* The bottom of the stack points to the actual arguments.  If
2872   REG_PARM_STACK_SPACE is defined, this includes the space for the register
2873   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2874   stack space for register parameters is not pushed by the caller, but
2875   rather part of the fixed stack areas and hence not included in
2876   `current_function_outgoing_args_size'.  Nevertheless, we must allow
2877   for it when allocating stack dynamic objects.  */
2878
2879#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2880#define STACK_DYNAMIC_OFFSET(FNDECL)	\
2881((ACCUMULATE_OUTGOING_ARGS						      \
2882  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2883 + (STACK_POINTER_OFFSET))
2884
2885#else
2886#define STACK_DYNAMIC_OFFSET(FNDECL)	\
2887((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
2888 + (STACK_POINTER_OFFSET))
2889#endif
2890#endif
2891
2892/* On most machines, the CFA coincides with the first incoming parm.  */
2893
2894#ifndef ARG_POINTER_CFA_OFFSET
2895#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2896#endif
2897
2898/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
2899   had its address taken.  DECL is the decl or SAVE_EXPR for the
2900   object stored in the register, for later use if we do need to force
2901   REG into the stack.  REG is overwritten by the MEM as in
2902   put_reg_into_stack.  RESCAN is true if previously emitted
2903   instructions must be rescanned and modified now that the REG has
2904   been transformed.  */
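
/* For example, when the address of an SImode pseudo 101 holding "i" is
   taken, the (reg:SI 101) rtx is rewritten in place as
   (mem:SI (addressof (reg:SI 102) 101 <decl for i>)); the fresh pseudo
   102 stands in for the register if purge_addressof later manages to
   resolve the ADDRESSOF without a stack slot.  */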
2905
2906rtx
2907gen_mem_addressof (reg, decl, rescan)
2908     rtx reg;
2909     tree decl;
2910     int rescan;
2911{
2912  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2913			     REGNO (reg), decl);
2914
2915  /* Calculate this before we start messing with decl's RTL.  */
2916  HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
2917
2918  /* If the original REG was a user-variable, then so is the REG whose
2919     address is being taken.  Likewise for unchanging.  */
2920  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2921  RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2922
2923  PUT_CODE (reg, MEM);
2924  MEM_ATTRS (reg) = 0;
2925  XEXP (reg, 0) = r;
2926
2927  if (decl)
2928    {
2929      tree type = TREE_TYPE (decl);
2930      enum machine_mode decl_mode
2931	= (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
2932      rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
2933		      : DECL_RTL_IF_SET (decl));
2934
2935      PUT_MODE (reg, decl_mode);
2936
2937      /* Clear DECL_RTL momentarily so functions below will work
2938	 properly, then set it again.  */
2939      if (DECL_P (decl) && decl_rtl == reg)
2940	SET_DECL_RTL (decl, 0);
2941
2942      set_mem_attributes (reg, decl, 1);
2943      set_mem_alias_set (reg, set);
2944
2945      if (DECL_P (decl) && decl_rtl == reg)
2946	SET_DECL_RTL (decl, reg);
2947
2948      if (rescan
2949	  && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
2950	fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
2951    }
2952  else if (rescan)
2953    fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
2954
2955  return reg;
2956}
2957
2958/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */
2959
2960void
2961flush_addressof (decl)
2962     tree decl;
2963{
2964  if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2965      && DECL_RTL (decl) != 0
2966      && GET_CODE (DECL_RTL (decl)) == MEM
2967      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2968      && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2969    put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2970}
2971
2972/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */
2973
2974static void
2975put_addressof_into_stack (r, ht)
2976     rtx r;
2977     htab_t ht;
2978{
2979  tree decl, type;
2980  int volatile_p, used_p;
2981
2982  rtx reg = XEXP (r, 0);
2983
2984  if (GET_CODE (reg) != REG)
2985    abort ();
2986
2987  decl = ADDRESSOF_DECL (r);
2988  if (decl)
2989    {
2990      type = TREE_TYPE (decl);
2991      volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2992		    && TREE_THIS_VOLATILE (decl));
2993      used_p = (TREE_USED (decl)
2994		|| (DECL_P (decl) && DECL_INITIAL (decl) != 0));
2995    }
2996  else
2997    {
2998      type = NULL_TREE;
2999      volatile_p = 0;
3000      used_p = 1;
3001    }
3002
3003  put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
3004		      volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
3005}
3006
3007/* List of replacements made below in purge_addressof_1 when creating
3008   bitfield insertions.  */
3009static rtx purge_bitfield_addressof_replacements;
3010
3011/* List of replacements made below in purge_addressof_1 for patterns
3012   (MEM (ADDRESSOF (REG ...))).  The key of the list entry is the
3013   corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
3014   the whole pattern.  The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3015   enough in complex cases, e.g. when some field values can be
3016   extracted by using a MEM with a narrower mode.  */
3017static rtx purge_addressof_replacements;
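
/* Entries on both lists above have the form
   (expr_list KEY (expr_list VALUE <remaining entries>)), so walking
   them means following two EXPR_LIST links per entry.  */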
3018
3019/* Helper function for purge_addressof.  See if the rtx expression at *LOC
3020   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
3021   the stack.  If the function returns FALSE then the replacement could not
3022   be made.  */
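
/* For example, if a QImode MEM refers to an SImode pseudo through an
   ADDRESSOF, as in (mem:QI (addressof (reg:SI 102) ...)), the modes
   differ; reads then become bit-field extractions from the pseudo and
   stores become bit-field insertions into it.  */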
3023
3024static bool
3025purge_addressof_1 (loc, insn, force, store, ht)
3026     rtx *loc;
3027     rtx insn;
3028     int force, store;
3029     htab_t ht;
3030{
3031  rtx x;
3032  RTX_CODE code;
3033  int i, j;
3034  const char *fmt;
3035  bool result = true;
3036
3037  /* Re-start here to avoid recursion in common cases.  */
3038 restart:
3039
3040  x = *loc;
3041  if (x == 0)
3042    return true;
3043
3044  code = GET_CODE (x);
3045
3046  /* If we don't return in any of the cases below, we will recurse inside
3047     the RTX, which will normally result in any ADDRESSOF being forced into
3048     memory.  */
3049  if (code == SET)
3050    {
3051      result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3052      result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3053      return result;
3054    }
3055  else if (code == ADDRESSOF)
3056    {
3057      rtx sub, insns;
3058
3059      if (GET_CODE (XEXP (x, 0)) != MEM)
3060	put_addressof_into_stack (x, ht);
3061
3062      /* We must create a copy of the rtx because it was created by
3063	 overwriting a REG rtx which is always shared.  */
3064      sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3065      if (validate_change (insn, loc, sub, 0)
3066	  || validate_replace_rtx (x, sub, insn))
3067	return true;
3068
3069      start_sequence ();
3070      sub = force_operand (sub, NULL_RTX);
3071      if (! validate_change (insn, loc, sub, 0)
3072	  && ! validate_replace_rtx (x, sub, insn))
3073	abort ();
3074
3075      insns = get_insns ();
3076      end_sequence ();
3077      emit_insn_before (insns, insn);
3078      return true;
3079    }
3080
3081  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3082    {
3083      rtx sub = XEXP (XEXP (x, 0), 0);
3084
3085      if (GET_CODE (sub) == MEM)
3086	sub = adjust_address_nv (sub, GET_MODE (x), 0);
3087      else if (GET_CODE (sub) == REG
3088	       && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3089	;
3090      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3091	{
3092	  int size_x, size_sub;
3093
3094	  if (!insn)
3095	    {
3096	      /* When processing REG_NOTES look at the list of
3097		 replacements done on the insn to find the register that X
3098		 was replaced by.  */
3099	      rtx tem;
3100
3101	      for (tem = purge_bitfield_addressof_replacements;
3102		   tem != NULL_RTX;
3103		   tem = XEXP (XEXP (tem, 1), 1))
3104		if (rtx_equal_p (x, XEXP (tem, 0)))
3105		  {
3106		    *loc = XEXP (XEXP (tem, 1), 0);
3107		    return true;
3108		  }
3109
3110	      /* See comment for purge_addressof_replacements.  */
3111	      for (tem = purge_addressof_replacements;
3112		   tem != NULL_RTX;
3113		   tem = XEXP (XEXP (tem, 1), 1))
3114		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3115		  {
3116		    rtx z = XEXP (XEXP (tem, 1), 0);
3117
3118		    if (GET_MODE (x) == GET_MODE (z)
3119			|| (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3120			    && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3121		      abort ();
3122
3123		    /* It can happen that the note may speak of things
3124		       in a wider (or just different) mode than the
3125		       code did.  This is especially true of
3126		       REG_RETVAL.  */
3127
3128		    if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3129		      z = SUBREG_REG (z);
3130
3131		    if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3132			&& (GET_MODE_SIZE (GET_MODE (x))
3133			    > GET_MODE_SIZE (GET_MODE (z))))
3134		      {
3135			/* This can occur as a result of invalid
3136			   pointer casts, e.g. float f; ...
3137			   *(long long int *)&f.
3138			   ??? We could emit a warning here, but
3139			   without a line number that wouldn't be
3140			   very helpful.  */
3141			z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3142		      }
3143		    else
3144		      z = gen_lowpart (GET_MODE (x), z);
3145
3146		    *loc = z;
3147		    return true;
3148		  }
3149
3150	      /* Sometimes we may not be able to find the replacement.  For
3151		 example when the original insn was a MEM in a wider mode,
3152		 and the note is part of a sign extension of a narrowed
3153		 version of that MEM.  Gcc testcase compile/990829-1.c can
3154		 generate an example of this situation.  Rather than complain
3155		 we return false, which will prompt our caller to remove the
3156		 offending note.  */
3157	      return false;
3158	    }
3159
3160	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
3161	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3162
3163	  /* Do not frob unchanging MEMs.  If a later reference forces the
3164	     pseudo to the stack, we can wind up with multiple writes to
3165	     an unchanging memory, which is invalid.  */
3166	  if (RTX_UNCHANGING_P (x) && size_x != size_sub)
3167	    ;
3168
3169	  /* Don't even consider working with paradoxical subregs,
3170	     or the moral equivalent seen here.  */
3171	  else if (size_x <= size_sub
3172	           && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3173	    {
3174	      /* Do a bitfield insertion to mirror what would happen
3175		 in memory.  */
3176
3177	      rtx val, seq;
3178
3179	      if (store)
3180		{
3181		  rtx p = PREV_INSN (insn);
3182
3183		  start_sequence ();
3184		  val = gen_reg_rtx (GET_MODE (x));
3185		  if (! validate_change (insn, loc, val, 0))
3186		    {
3187		      /* Discard the current sequence and put the
3188			 ADDRESSOF on stack.  */
3189		      end_sequence ();
3190		      goto give_up;
3191		    }
3192		  seq = get_insns ();
3193		  end_sequence ();
3194		  emit_insn_before (seq, insn);
3195		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3196					 insn, ht);
3197
3198		  start_sequence ();
3199		  store_bit_field (sub, size_x, 0, GET_MODE (x),
3200				   val, GET_MODE_SIZE (GET_MODE (sub)));
3201
3202		  /* Make sure to unshare any shared rtl that store_bit_field
3203		     might have created.  */
3204		  unshare_all_rtl_again (get_insns ());
3205
3206		  seq = get_insns ();
3207		  end_sequence ();
3208		  p = emit_insn_after (seq, insn);
3209		  if (NEXT_INSN (insn))
3210		    compute_insns_for_mem (NEXT_INSN (insn),
3211					   p ? NEXT_INSN (p) : NULL_RTX,
3212					   ht);
3213		}
3214	      else
3215		{
3216		  rtx p = PREV_INSN (insn);
3217
3218		  start_sequence ();
3219		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3220					   GET_MODE (x), GET_MODE (x),
3221					   GET_MODE_SIZE (GET_MODE (sub)));
3222
3223		  if (! validate_change (insn, loc, val, 0))
3224		    {
3225		      /* Discard the current sequence and put the
3226			 ADDRESSOF on stack.  */
3227		      end_sequence ();
3228		      goto give_up;
3229		    }
3230
3231		  seq = get_insns ();
3232		  end_sequence ();
3233		  emit_insn_before (seq, insn);
3234		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3235					 insn, ht);
3236		}
3237
3238	      /* Remember the replacement so that the same one can be done
3239		 on the REG_NOTES.  */
3240	      purge_bitfield_addressof_replacements
3241		= gen_rtx_EXPR_LIST (VOIDmode, x,
3242				     gen_rtx_EXPR_LIST
3243				     (VOIDmode, val,
3244				      purge_bitfield_addressof_replacements));
3245
3246	      /* We replaced with a reg -- all done.  */
3247	      return true;
3248	    }
3249	}
3250
3251      else if (validate_change (insn, loc, sub, 0))
3252	{
3253	  /* Remember the replacement so that the same one can be done
3254	     on the REG_NOTES.  */
3255	  if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3256	    {
3257	      rtx tem;
3258
3259	      for (tem = purge_addressof_replacements;
3260		   tem != NULL_RTX;
3261		   tem = XEXP (XEXP (tem, 1), 1))
3262		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3263		  {
3264		    XEXP (XEXP (tem, 1), 0) = sub;
3265		    return true;
3266		  }
3267	      purge_addressof_replacements
3268		= gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3269				     gen_rtx_EXPR_LIST (VOIDmode, sub,
3270							purge_addressof_replacements));
3271	      return true;
3272	    }
3273	  goto restart;
3274	}
3275    }
3276
3277 give_up:
3278  /* Scan all subexpressions.  */
3279  fmt = GET_RTX_FORMAT (code);
3280  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3281    {
3282      if (*fmt == 'e')
3283	result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3284      else if (*fmt == 'E')
3285	for (j = 0; j < XVECLEN (x, i); j++)
3286	  result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3287    }
3288
3289  return result;
3290}
3291
3292/* Return a hash value for K, an insns_for_mem_entry keyed by a REG.  */
3293
3294static hashval_t
3295insns_for_mem_hash (k)
3296     const void * k;
3297{
3298  /* Use the address of the key for the hash value.  */
3299  struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3300  return htab_hash_pointer (m->key);
3301}
3302
3303/* Return nonzero if K1 and K2 (two insns_for_mem_entrys) have the same REG key.  */
3304
3305static int
3306insns_for_mem_comp (k1, k2)
3307     const void * k1;
3308     const void * k2;
3309{
3310  struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3311  struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3312  return m1->key == m2->key;
3313}
3314
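/* Editorial sketch (not part of this revision): how the two callbacks
   above plug into the hash table that purge_addressof builds below.
   KEY is assumed to be a REG rtx taken from some ADDRESSOF.  */
#if 0
static rtx
lookup_insns_for_reg (ht, key)
     htab_t ht;
     rtx key;
{
  struct insns_for_mem_entry tmp, *entry;

  /* Only the KEY field matters for lookup; hashing and comparison go
     through insns_for_mem_hash and insns_for_mem_comp.  */
  tmp.key = key;
  tmp.insns = NULL_RTX;
  entry = (struct insns_for_mem_entry *) htab_find (ht, &tmp);

  /* The result is the EXPR_LIST of insns recorded by
     insns_for_mem_walk, or NULL_RTX if the REG was never seen.  */
  return entry ? entry->insns : NULL_RTX;
}
#endif
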
3315struct insns_for_mem_walk_info
3316{
3317  /* The hash table that we are using to record which INSNs use which
3318     MEMs.  */
3319  htab_t ht;
3320
3321  /* The INSN we are currently processing.  */
3322  rtx insn;
3323
3324  /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3325     to find the insns that use the REGs in the ADDRESSOFs.  */
3326  int pass;
3327};
3328
3329/* Called from compute_insns_for_mem via for_each_rtx.  If R is a REG
3330   that might be used in an ADDRESSOF expression, record this INSN in
3331   the hash table given by DATA (which is really a pointer to an
3332   insns_for_mem_walk_info structure).  */
3333
3334static int
3335insns_for_mem_walk (r, data)
3336     rtx *r;
3337     void *data;
3338{
3339  struct insns_for_mem_walk_info *ifmwi
3340    = (struct insns_for_mem_walk_info *) data;
3341  struct insns_for_mem_entry tmp;
3342  tmp.insns = NULL_RTX;
3343
3344  if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3345      && GET_CODE (XEXP (*r, 0)) == REG)
3346    {
3347      PTR *e;
3348      tmp.key = XEXP (*r, 0);
3349      e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3350      if (*e == NULL)
3351	{
3352	  *e = ggc_alloc (sizeof (tmp));
3353	  memcpy (*e, &tmp, sizeof (tmp));
3354	}
3355    }
3356  else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3357    {
3358      struct insns_for_mem_entry *ifme;
3359      tmp.key = *r;
3360      ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3361
3362      /* If we have not already recorded this INSN, do so now.  Since
3363	 we process the INSNs in order, we know that if we have
3364	 recorded it, it must be at the front of the list.  */
3365      if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3366	ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3367					 ifme->insns);
3368    }
3369
3370  return 0;
3371}
3372
3373/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3374   which REGs in HT.  */
3375
3376static void
3377compute_insns_for_mem (insns, last_insn, ht)
3378     rtx insns;
3379     rtx last_insn;
3380     htab_t ht;
3381{
3382  rtx insn;
3383  struct insns_for_mem_walk_info ifmwi;
3384  ifmwi.ht = ht;
3385
3386  for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3387    for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3388      if (INSN_P (insn))
3389	{
3390	  ifmwi.insn = insn;
3391	  for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3392	}
3393}
3394
3395/* Helper function for purge_addressof called through for_each_rtx.
3396   Returns true iff the rtl is an ADDRESSOF.  */
3397
3398static int
3399is_addressof (rtl, data)
3400     rtx *rtl;
3401     void *data ATTRIBUTE_UNUSED;
3402{
3403  return GET_CODE (*rtl) == ADDRESSOF;
3404}
3405
3406/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
3407   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3408   stack.  */
3409
3410void
3411purge_addressof (insns)
3412     rtx insns;
3413{
3414  rtx insn;
3415  htab_t ht;
3416
3417  /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
3418     requires a fixup pass over the instruction stream to correct
3419     INSNs that depended on the REG being a REG, and not a MEM.  But,
3420     these fixup passes are slow.  Furthermore, most MEMs are not
3421     mentioned in very many instructions.  So, we speed up the process
3422     by pre-calculating which REGs occur in which INSNs; that allows
3423     us to perform the fixup passes much more quickly.  */
3424  ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3425  compute_insns_for_mem (insns, NULL_RTX, ht);
3426
3427  for (insn = insns; insn; insn = NEXT_INSN (insn))
3428    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3429	|| GET_CODE (insn) == CALL_INSN)
3430      {
3431	if (! purge_addressof_1 (&PATTERN (insn), insn,
3432				 asm_noperands (PATTERN (insn)) > 0, 0, ht))
3433	  /* If we could not replace the ADDRESSOFs in the insn,
3434	     something is wrong.  */
3435	  abort ();
3436
3437	if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
3438	  {
3439	    /* If we could not replace the ADDRESSOFs in the insn's notes,
3440	       we can just remove the offending notes instead.  */
3441	    rtx note;
3442
3443	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3444	      {
3445		/* If we find a REG_RETVAL note then the insn is a libcall.
3446		   Such insns must have REG_EQUAL notes as well, in order
3447		   for later passes of the compiler to work.  So it is not
3448		   safe to delete the notes here, and instead we abort.  */
3449		if (REG_NOTE_KIND (note) == REG_RETVAL)
3450		  abort ();
3451		if (for_each_rtx (&note, is_addressof, NULL))
3452		  remove_note (insn, note);
3453	      }
3454	  }
3455      }
3456
3457  /* Clean up.  */
3458  purge_bitfield_addressof_replacements = 0;
3459  purge_addressof_replacements = 0;
3460
3461  /* REGs are shared.  purge_addressof will destructively replace a REG
3462     with a MEM, which creates shared MEMs.
3463
3464     Unfortunately, the children of put_reg_into_stack assume that MEMs
3465     referring to the same stack slot are shared (fixup_var_refs and
3466     the associated hash table code).
3467
3468     So, we have to do another unsharing pass after we have flushed any
3469     REGs that had their address taken into the stack.
3470
3471     It may be worth tracking whether or not we converted any REGs into
3472     MEMs to avoid this overhead when it is not needed.  */
3473  unshare_all_rtl_again (get_insns ());
3474}
3475
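/* Editorial note (illustrative, not part of the original source):
   after purge_addressof runs, a reference that reached RTL as

	(mem:SI (addressof:Pmode (reg:SI 60) ...))

   has either been rewritten to use (reg:SI 60) directly, or, when the
   register's address really escapes, into a stack MEM based on
   virtual-stack-vars that instantiate_virtual_regs later resolves to
   a frame-pointer address.  The operand details of ADDRESSOF are
   elided here.  */
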
3476/* Convert a SET of a hard subreg to a set of the appropriate hard
3477   register.  A subroutine of purge_hard_subreg_sets.  */
3478
3479static void
3480purge_single_hard_subreg_set (pattern)
3481     rtx pattern;
3482{
3483  rtx reg = SET_DEST (pattern);
3484  enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3485  int offset = 0;
3486
3487  if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3488      && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3489    {
3490      offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3491				    GET_MODE (SUBREG_REG (reg)),
3492				    SUBREG_BYTE (reg),
3493				    GET_MODE (reg));
3494      reg = SUBREG_REG (reg);
3495    }
3496
3497
3498  if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3499    {
3500      reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3501      SET_DEST (pattern) = reg;
3502    }
3503}
3504
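/* Editorial example (hypothetical register numbers): on a little-endian
   target with 32-bit words where hard reg 0 holds a DImode value in
   regs 0 and 1, purge_single_hard_subreg_set rewrites

	(set (subreg:SI (reg:DI 0) 4) ...)

   as

	(set (reg:SI 1) ...)

   because subreg_regno_offset maps the four-byte SUBREG_BYTE to a
   one-register offset from REGNO 0.  */
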
3505/* Eliminate all occurrences of SETs of hard subregs from INSNS.  The
3506   only such SETs that we expect to see are those left in because
3507   integrate can't handle sets of parts of a return value register.
3508
3509   We don't use alter_subreg because we only want to eliminate subregs
3510   of hard registers.  */
3511
3512void
3513purge_hard_subreg_sets (insn)
3514     rtx insn;
3515{
3516  for (; insn; insn = NEXT_INSN (insn))
3517    {
3518      if (INSN_P (insn))
3519	{
3520	  rtx pattern = PATTERN (insn);
3521	  switch (GET_CODE (pattern))
3522	    {
3523	    case SET:
3524	      if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3525		purge_single_hard_subreg_set (pattern);
3526	      break;
3527	    case PARALLEL:
3528	      {
3529		int j;
3530		for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3531		  {
3532		    rtx inner_pattern = XVECEXP (pattern, 0, j);
3533		    if (GET_CODE (inner_pattern) == SET
3534			&& GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3535		      purge_single_hard_subreg_set (inner_pattern);
3536		  }
3537	      }
3538	      break;
3539	    default:
3540	      break;
3541	    }
3542	}
3543    }
3544}
3545
3546/* Pass through the INSNS of function FNDECL and convert virtual register
3547   references to hard register references.  */
3548
3549void
3550instantiate_virtual_regs (fndecl, insns)
3551     tree fndecl;
3552     rtx insns;
3553{
3554  rtx insn;
3555  unsigned int i;
3556
3557  /* Compute the offsets to use for this function.  */
3558  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3559  var_offset = STARTING_FRAME_OFFSET;
3560  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3561  out_arg_offset = STACK_POINTER_OFFSET;
3562  cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3563
3564  /* Scan all variables and parameters of this function.  For each that is
3565     in memory, instantiate all virtual registers if the result is a valid
3566     address.  If not, we do it later.  That will handle most uses of virtual
3567     regs on many machines.  */
3568  instantiate_decls (fndecl, 1);
3569
3570  /* Initialize recognition, indicating that volatile is OK.  */
3571  init_recog ();
3572
3573  /* Scan through all the insns, instantiating every virtual register still
3574     present.  */
3575  for (insn = insns; insn; insn = NEXT_INSN (insn))
3576    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3577	|| GET_CODE (insn) == CALL_INSN)
3578      {
3579	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3580	if (INSN_DELETED_P (insn))
3581	  continue;
3582	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3583	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
3584	if (GET_CODE (insn) == CALL_INSN)
3585	  instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3586				      NULL_RTX, 0);
3587
3588	/* Past this point all ASM statements should match.  Verify that
3589	   to avoid failures later in the compilation process.  */
3590        if (asm_noperands (PATTERN (insn)) >= 0
3591	    && ! check_asm_operands (PATTERN (insn)))
3592          instantiate_virtual_regs_lossage (insn);
3593      }
3594
3595  /* Instantiate the stack slots for the parm registers, for later use in
3596     addressof elimination.  */
3597  for (i = 0; i < max_parm_reg; ++i)
3598    if (parm_reg_stack_loc[i])
3599      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3600
3601  /* Now instantiate the remaining register equivalences for debugging info.
3602     These will not be valid addresses.  */
3603  instantiate_decls (fndecl, 0);
3604
3605  /* Indicate that, from now on, assign_stack_local should use
3606     frame_pointer_rtx.  */
3607  virtuals_instantiated = 1;
3608}
3609
3610/* Scan all decls in FNDECL (both variables and parameters) and instantiate
3611   all virtual registers in their DECL_RTL's.
3612
3613   If VALID_ONLY, do this only if the resulting address is still valid.
3614   Otherwise, always do it.  */
3615
3616static void
3617instantiate_decls (fndecl, valid_only)
3618     tree fndecl;
3619     int valid_only;
3620{
3621  tree decl;
3622
3623  /* Process all parameters of the function.  */
3624  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3625    {
3626      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3627      HOST_WIDE_INT size_rtl;
3628
3629      instantiate_decl (DECL_RTL (decl), size, valid_only);
3630
3631      /* If the parameter was promoted, then the incoming RTL mode may be
3632	 larger than the declared type size.  We must use the larger of
3633	 the two sizes.  */
3634      size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3635      size = MAX (size_rtl, size);
3636      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3637    }
3638
3639  /* Now process all variables defined in the function or its subblocks.  */
3640  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3641}
3642
3643/* Subroutine of instantiate_decls: Process all decls in the given
3644   BLOCK node and all its subblocks.  */
3645
3646static void
3647instantiate_decls_1 (let, valid_only)
3648     tree let;
3649     int valid_only;
3650{
3651  tree t;
3652
3653  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3654    if (DECL_RTL_SET_P (t))
3655      instantiate_decl (DECL_RTL (t),
3656			int_size_in_bytes (TREE_TYPE (t)),
3657			valid_only);
3658
3659  /* Process all subblocks.  */
3660  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3661    instantiate_decls_1 (t, valid_only);
3662}
3663
3664/* Subroutine of the preceding procedures: Given RTL representing a
3665   decl and the size of the object, do any instantiation required.
3666
3667   If VALID_ONLY is nonzero, it means that the RTL should only be
3668   changed if the new address is valid.  */
3669
3670static void
3671instantiate_decl (x, size, valid_only)
3672     rtx x;
3673     HOST_WIDE_INT size;
3674     int valid_only;
3675{
3676  enum machine_mode mode;
3677  rtx addr;
3678
3679  /* If this is not a MEM, no need to do anything.  Similarly if the
3680     address is a constant or a register that is not a virtual register.  */
3681
3682  if (x == 0 || GET_CODE (x) != MEM)
3683    return;
3684
3685  addr = XEXP (x, 0);
3686  if (CONSTANT_P (addr)
3687      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3688      || (GET_CODE (addr) == REG
3689	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3690	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3691    return;
3692
3693  /* If we should only do this if the address is valid, copy the address.
3694     We need to do this so we can undo any changes that might make the
3695     address invalid.  This copy is unfortunate, but probably can't be
3696     avoided.  */
3697
3698  if (valid_only)
3699    addr = copy_rtx (addr);
3700
3701  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3702
3703  if (valid_only && size >= 0)
3704    {
3705      unsigned HOST_WIDE_INT decl_size = size;
3706
3707      /* Now verify that the resulting address is valid for every integer or
3708	 floating-point mode up to and including SIZE bytes long.  We do this
3709	 since the object might be accessed in any mode and frame addresses
3710	 are shared.  */
3711
3712      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3713	   mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3714	   mode = GET_MODE_WIDER_MODE (mode))
3715	if (! memory_address_p (mode, addr))
3716	  return;
3717
3718      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3719	   mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3720	   mode = GET_MODE_WIDER_MODE (mode))
3721	if (! memory_address_p (mode, addr))
3722	  return;
3723    }
3724
3725  /* Put back the address now that we have updated it and we either know
3726     it is valid or we don't care whether it is valid.  */
3727
3728  XEXP (x, 0) = addr;
3729}
3730
3731/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3732   is a virtual register, return the equivalent hard register and set the
3733   offset indirectly through the pointer.  Otherwise, return 0.  */
3734
3735static rtx
3736instantiate_new_reg (x, poffset)
3737     rtx x;
3738     HOST_WIDE_INT *poffset;
3739{
3740  rtx new;
3741  HOST_WIDE_INT offset;
3742
3743  if (x == virtual_incoming_args_rtx)
3744    new = arg_pointer_rtx, offset = in_arg_offset;
3745  else if (x == virtual_stack_vars_rtx)
3746    new = frame_pointer_rtx, offset = var_offset;
3747  else if (x == virtual_stack_dynamic_rtx)
3748    new = stack_pointer_rtx, offset = dynamic_offset;
3749  else if (x == virtual_outgoing_args_rtx)
3750    new = stack_pointer_rtx, offset = out_arg_offset;
3751  else if (x == virtual_cfa_rtx)
3752    new = arg_pointer_rtx, offset = cfa_offset;
3753  else
3754    return 0;
3755
3756  *poffset = offset;
3757  return new;
3758}
3759
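/* Editorial example (illustrative only): with the mapping above, a
   use such as

	(plus:Pmode (reg:Pmode virtual-stack-vars) (const_int 8))

   is rewritten by instantiate_virtual_regs_1 into

	(plus:Pmode (reg:Pmode frame-pointer) (const_int 8 + VAR_OFFSET))

   where VAR_OFFSET is the STARTING_FRAME_OFFSET recorded for this
   function.  */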
3760
3761/* Called when instantiate_virtual_regs has failed to update the instruction.
3762   Usually this means that a non-matching instruction has been emitted;
3763   for asm statements, however, the problem may be in the constraints.  */
3764static void
3765instantiate_virtual_regs_lossage (insn)
3766     rtx insn;
3767{
3768  if (asm_noperands (PATTERN (insn)) >= 0)
3769    {
3770      error_for_asm (insn, "impossible constraint in `asm'");
3771      delete_insn (insn);
3772    }
3773  else
3774    abort ();
3775}
3776/* Given a pointer to a piece of rtx and an optional pointer to the
3777   containing object, instantiate any virtual registers present in it.
3778
3779   If EXTRA_INSNS, we always do the replacement and generate
3780   any extra insns before OBJECT.  If it is zero, we do nothing if the replacement
3781   is not valid.
3782
3783   Return 1 if we either had nothing to do or if we were able to do the
3784   needed replacement.  Return 0 otherwise; we only return zero if
3785   EXTRA_INSNS is zero.
3786
3787   We first try some simple transformations to avoid the creation of extra
3788   pseudos.  */
3789
3790static int
3791instantiate_virtual_regs_1 (loc, object, extra_insns)
3792     rtx *loc;
3793     rtx object;
3794     int extra_insns;
3795{
3796  rtx x;
3797  RTX_CODE code;
3798  rtx new = 0;
3799  HOST_WIDE_INT offset = 0;
3800  rtx temp;
3801  rtx seq;
3802  int i, j;
3803  const char *fmt;
3804
3805  /* Re-start here to avoid recursion in common cases.  */
3806 restart:
3807
3808  x = *loc;
3809  if (x == 0)
3810    return 1;
3811
3812  /* We may have detected and deleted invalid asm statements.  */
3813  if (object && INSN_P (object) && INSN_DELETED_P (object))
3814    return 1;
3815
3816  code = GET_CODE (x);
3817
3818  /* Check for some special cases.  */
3819  switch (code)
3820    {
3821    case CONST_INT:
3822    case CONST_DOUBLE:
3823    case CONST_VECTOR:
3824    case CONST:
3825    case SYMBOL_REF:
3826    case CODE_LABEL:
3827    case PC:
3828    case CC0:
3829    case ASM_INPUT:
3830    case ADDR_VEC:
3831    case ADDR_DIFF_VEC:
3832    case RETURN:
3833      return 1;
3834
3835    case SET:
3836      /* We are allowed to set the virtual registers.  This means that
3837	 the actual register should receive the source minus the
3838	 appropriate offset.  This is used, for example, in the handling
3839	 of non-local gotos.  */
3840      if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3841	{
3842	  rtx src = SET_SRC (x);
3843
3844	  /* We are setting the register, not using it, so the relevant
3845	     offset is the negative of the offset to use were we using
3846	     the register.  */
3847	  offset = - offset;
3848	  instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3849
3850	  /* The only valid sources here are PLUS or REG.  Just do
3851	     the simplest possible thing to handle them.  */
3852	  if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3853	    {
3854	      instantiate_virtual_regs_lossage (object);
3855	      return 1;
3856	    }
3857
3858	  start_sequence ();
3859	  if (GET_CODE (src) != REG)
3860	    temp = force_operand (src, NULL_RTX);
3861	  else
3862	    temp = src;
3863	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3864	  seq = get_insns ();
3865	  end_sequence ();
3866
3867	  emit_insn_before (seq, object);
3868	  SET_DEST (x) = new;
3869
3870	  if (! validate_change (object, &SET_SRC (x), temp, 0)
3871	      || ! extra_insns)
3872	    instantiate_virtual_regs_lossage (object);
3873
3874	  return 1;
3875	}
3876
3877      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3878      loc = &SET_SRC (x);
3879      goto restart;
3880
3881    case PLUS:
3882      /* Handle special case of virtual register plus constant.  */
3883      if (CONSTANT_P (XEXP (x, 1)))
3884	{
3885	  rtx old, new_offset;
3886
3887	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
3888	  if (GET_CODE (XEXP (x, 0)) == PLUS)
3889	    {
3890	      if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3891		{
3892		  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3893					      extra_insns);
3894		  new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3895		}
3896	      else
3897		{
3898		  loc = &XEXP (x, 0);
3899		  goto restart;
3900		}
3901	    }
3902
3903#ifdef POINTERS_EXTEND_UNSIGNED
3904	  /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3905	     we can commute the PLUS and SUBREG because pointers into the
3906	     frame are well-behaved.  */
3907	  else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3908		   && GET_CODE (XEXP (x, 1)) == CONST_INT
3909		   && 0 != (new
3910			    = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3911						   &offset))
3912		   && validate_change (object, loc,
3913				       plus_constant (gen_lowpart (ptr_mode,
3914								   new),
3915						      offset
3916						      + INTVAL (XEXP (x, 1))),
3917				       0))
3918		return 1;
3919#endif
3920	  else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3921	    {
3922	      /* We know the second operand is a constant.  Unless the
3923		 first operand is a REG (which has already been checked),
3924		 it needs to be checked.  */
3925	      if (GET_CODE (XEXP (x, 0)) != REG)
3926		{
3927		  loc = &XEXP (x, 0);
3928		  goto restart;
3929		}
3930	      return 1;
3931	    }
3932
3933	  new_offset = plus_constant (XEXP (x, 1), offset);
3934
3935	  /* If the new constant is zero, try to replace the sum with just
3936	     the register.  */
3937	  if (new_offset == const0_rtx
3938	      && validate_change (object, loc, new, 0))
3939	    return 1;
3940
3941	  /* Next try to replace the register and new offset.
3942	     There are two changes to validate here and we can't assume that
3943	     in the case of old offset equals new just changing the register
3944	     will yield a valid insn.  In the interests of a little efficiency,
3945	     however, we only call validate change once (we don't queue up the
3946	     changes and then call apply_change_group).  */
3947
3948	  old = XEXP (x, 0);
3949	  if (offset == 0
3950	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
3951	      : (XEXP (x, 0) = new,
3952		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3953	    {
3954	      if (! extra_insns)
3955		{
3956		  XEXP (x, 0) = old;
3957		  return 0;
3958		}
3959
3960	      /* Otherwise copy the new constant into a register and replace
3961		 constant with that register.  */
3962	      temp = gen_reg_rtx (Pmode);
3963	      XEXP (x, 0) = new;
3964	      if (validate_change (object, &XEXP (x, 1), temp, 0))
3965		emit_insn_before (gen_move_insn (temp, new_offset), object);
3966	      else
3967		{
3968		  /* If that didn't work, replace this expression with a
3969		     register containing the sum.  */
3970
3971		  XEXP (x, 0) = old;
3972		  new = gen_rtx_PLUS (Pmode, new, new_offset);
3973
3974		  start_sequence ();
3975		  temp = force_operand (new, NULL_RTX);
3976		  seq = get_insns ();
3977		  end_sequence ();
3978
3979		  emit_insn_before (seq, object);
3980		  if (! validate_change (object, loc, temp, 0)
3981		      && ! validate_replace_rtx (x, temp, object))
3982		    {
3983		      instantiate_virtual_regs_lossage (object);
3984		      return 1;
3985		    }
3986		}
3987	    }
3988
3989	  return 1;
3990	}
3991
3992      /* Fall through to generic two-operand expression case.  */
3993    case EXPR_LIST:
3994    case CALL:
3995    case COMPARE:
3996    case MINUS:
3997    case MULT:
3998    case DIV:      case UDIV:
3999    case MOD:      case UMOD:
4000    case AND:      case IOR:      case XOR:
4001    case ROTATERT: case ROTATE:
4002    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
4003    case NE:       case EQ:
4004    case GE:       case GT:       case GEU:    case GTU:
4005    case LE:       case LT:       case LEU:    case LTU:
4006      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
4007	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
4008      loc = &XEXP (x, 0);
4009      goto restart;
4010
4011    case MEM:
4012      /* Most cases of MEM that convert to valid addresses have already been
4013	 handled by our scan of decls.  The only special handling we
4014	 need here is to make a copy of the rtx to ensure it isn't being
4015	 shared if we have to change it to a pseudo.
4016
4017	 If the rtx is a simple reference to an address via a virtual register,
4018	 it can potentially be shared.  In such cases, first try to make it
4019	 a valid address, which can also be shared.  Otherwise, copy it and
4020	 proceed normally.
4021
4022	 First check for common cases that need no processing.  These are
4023	 usually due to instantiation already being done on a previous instance
4024	 of a shared rtx.  */
4025
4026      temp = XEXP (x, 0);
4027      if (CONSTANT_ADDRESS_P (temp)
4028#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4029	  || temp == arg_pointer_rtx
4030#endif
4031#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4032	  || temp == hard_frame_pointer_rtx
4033#endif
4034	  || temp == frame_pointer_rtx)
4035	return 1;
4036
4037      if (GET_CODE (temp) == PLUS
4038	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4039	  && (XEXP (temp, 0) == frame_pointer_rtx
4040#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4041	      || XEXP (temp, 0) == hard_frame_pointer_rtx
4042#endif
4043#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4044	      || XEXP (temp, 0) == arg_pointer_rtx
4045#endif
4046	      ))
4047	return 1;
4048
4049      if (temp == virtual_stack_vars_rtx
4050	  || temp == virtual_incoming_args_rtx
4051	  || (GET_CODE (temp) == PLUS
4052	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4053	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
4054		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4055	{
4056	  /* This MEM may be shared.  If the substitution can be done without
4057	     the need to generate new pseudos, we want to do it in place
4058	     so all copies of the shared rtx benefit.  The call below will
4059	     only make substitutions if the resulting address is still
4060	     valid.
4061
4062	     Note that we cannot pass X as the object in the recursive call
4063	     since the insn being processed may not allow all valid
4064	     addresses.  However, if we were not passed an object, we can
4065	     only modify X without copying it if X will have a valid
4066	     address.
4067
4068	     ??? Also note that this can still lose if OBJECT is an insn that
4069	     has fewer restrictions on an address than some other insn.
4070	     In that case, we will modify the shared address.  This case
4071	     doesn't seem very likely, though.  One case where this could
4072	     happen is in the case of a USE or CLOBBER reference, but we
4073	     take care of that below.  */
4074
4075	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4076					  object ? object : x, 0))
4077	    return 1;
4078
4079	  /* Otherwise make a copy and process that copy.  We copy the entire
4080	     RTL expression since it might be a PLUS which could also be
4081	     shared.  */
4082	  *loc = x = copy_rtx (x);
4083	}
4084
4085      /* Fall through to generic unary operation case.  */
4086    case PREFETCH:
4087    case SUBREG:
4088    case STRICT_LOW_PART:
4089    case NEG:          case NOT:
4090    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
4091    case SIGN_EXTEND:  case ZERO_EXTEND:
4092    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4093    case FLOAT:        case FIX:
4094    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4095    case ABS:
4096    case SQRT:
4097    case FFS:
4098      /* These cases either have just one operand, or we know that we need not
4099	 check the rest of the operands.  */
4100      loc = &XEXP (x, 0);
4101      goto restart;
4102
4103    case USE:
4104    case CLOBBER:
4105      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
4106	 go ahead and make the invalid change, but do it to a copy.  For a REG,
4107	 just make the recursive call, since there's no chance of a problem.  */
4108
4109      if ((GET_CODE (XEXP (x, 0)) == MEM
4110	   && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4111					  0))
4112	  || (GET_CODE (XEXP (x, 0)) == REG
4113	      && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4114	return 1;
4115
4116      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4117      loc = &XEXP (x, 0);
4118      goto restart;
4119
4120    case REG:
4121      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
4122	 in front of this insn and substitute the temporary.  */
4123      if ((new = instantiate_new_reg (x, &offset)) != 0)
4124	{
4125	  temp = plus_constant (new, offset);
4126	  if (!validate_change (object, loc, temp, 0))
4127	    {
4128	      if (! extra_insns)
4129		return 0;
4130
4131	      start_sequence ();
4132	      temp = force_operand (temp, NULL_RTX);
4133	      seq = get_insns ();
4134	      end_sequence ();
4135
4136	      emit_insn_before (seq, object);
4137	      if (! validate_change (object, loc, temp, 0)
4138		  && ! validate_replace_rtx (x, temp, object))
4139	        instantiate_virtual_regs_lossage (object);
4140	    }
4141	}
4142
4143      return 1;
4144
4145    case ADDRESSOF:
4146      if (GET_CODE (XEXP (x, 0)) == REG)
4147	return 1;
4148
4149      else if (GET_CODE (XEXP (x, 0)) == MEM)
4150	{
4151	  /* If we have a (addressof (mem ..)), do any instantiation inside
4152	     since we know we'll be making the inside valid when we finally
4153	     remove the ADDRESSOF.  */
4154	  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4155	  return 1;
4156	}
4157      break;
4158
4159    default:
4160      break;
4161    }
4162
4163  /* Scan all subexpressions.  */
4164  fmt = GET_RTX_FORMAT (code);
4165  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4166    if (*fmt == 'e')
4167      {
4168	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4169	  return 0;
4170      }
4171    else if (*fmt == 'E')
4172      for (j = 0; j < XVECLEN (x, i); j++)
4173	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4174					  extra_insns))
4175	  return 0;
4176
4177  return 1;
4178}
4179
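/* Editorial example (illustrative only): the SET case in
   instantiate_virtual_regs_1 handles stores *into* a virtual register,
   as in the nonlocal-goto code.  With DYNAMIC_OFFSET as computed in
   instantiate_virtual_regs, a hypothetical

	(set (reg virtual-stack-dynamic) (reg 100))

   becomes

	(set (reg stack-pointer) (plus (reg 100) (const_int -DYNAMIC_OFFSET)))

   so that a later use, which adds DYNAMIC_OFFSET back, recovers the
   stored value.  */
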
4180/* Optimization: assuming this function does not receive nonlocal gotos,
4181   delete the handlers for such, as well as the insns to establish
4182   and disestablish them.  */
4183
4184static void
4185delete_handlers ()
4186{
4187  rtx insn;
4188  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4189    {
4190      /* Delete the handler by turning off the flag that would
4191	 prevent jump_optimize from deleting it.
4192	 Also permit deletion of the nonlocal labels themselves
4193	 if nothing local refers to them.  */
4194      if (GET_CODE (insn) == CODE_LABEL)
4195	{
4196	  tree t, last_t;
4197
4198	  LABEL_PRESERVE_P (insn) = 0;
4199
4200	  /* Remove it from the nonlocal_label list, to avoid confusing
4201	     flow.  */
4202	  for (t = nonlocal_labels, last_t = 0; t;
4203	       last_t = t, t = TREE_CHAIN (t))
4204	    if (DECL_RTL (TREE_VALUE (t)) == insn)
4205	      break;
4206	  if (t)
4207	    {
4208	      if (! last_t)
4209		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4210	      else
4211		TREE_CHAIN (last_t) = TREE_CHAIN (t);
4212	    }
4213	}
4214      if (GET_CODE (insn) == INSN)
4215	{
4216	  int can_delete = 0;
4217	  rtx t;
4218	  for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4219	    if (reg_mentioned_p (t, PATTERN (insn)))
4220	      {
4221		can_delete = 1;
4222		break;
4223	      }
4224	  if (can_delete
4225	      || (nonlocal_goto_stack_level != 0
4226		  && reg_mentioned_p (nonlocal_goto_stack_level,
4227				      PATTERN (insn))))
4228	    delete_related_insns (insn);
4229	}
4230    }
4231}
4232
4233int
4234max_parm_reg_num ()
4235{
4236  return max_parm_reg;
4237}
4238
4239/* Return the first insn following those generated by `assign_parms'.  */
4240
4241rtx
4242get_first_nonparm_insn ()
4243{
4244  if (last_parm_insn)
4245    return NEXT_INSN (last_parm_insn);
4246  return get_insns ();
4247}
4248
4249/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4250   Crash if there is none.  */
4251
4252rtx
4253get_first_block_beg ()
4254{
4255  rtx searcher;
4256  rtx insn = get_first_nonparm_insn ();
4257
4258  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4259    if (GET_CODE (searcher) == NOTE
4260	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4261      return searcher;
4262
4263  abort ();	/* Invalid call to this function.  (See comments above.)  */
4264  return NULL_RTX;
4265}
4266
4267/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4268   This means a type for which function calls must pass an address to the
4269   function or get an address back from the function.
4270   EXP may be a type node or an expression (whose type is tested).  */
4271
4272int
4273aggregate_value_p (exp)
4274     tree exp;
4275{
4276  int i, regno, nregs;
4277  rtx reg;
4278
4279  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4280
4281  if (TREE_CODE (type) == VOID_TYPE)
4282    return 0;
4283  if (RETURN_IN_MEMORY (type))
4284    return 1;
4285  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4286     and thus can't be returned in registers.  */
4287  if (TREE_ADDRESSABLE (type))
4288    return 1;
4289  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4290    return 1;
4291  /* Make sure we have suitable call-clobbered regs to return
4292     the value in; if not, we must return it in memory.  */
4293  reg = hard_function_value (type, 0, 0);
4294
4295  /* If we have something other than a REG (e.g. a PARALLEL), then assume
4296     it is OK.  */
4297  if (GET_CODE (reg) != REG)
4298    return 0;
4299
4300  regno = REGNO (reg);
4301  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4302  for (i = 0; i < nregs; i++)
4303    if (! call_used_regs[regno + i])
4304      return 1;
4305  return 0;
4306}
4307
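/* Editorial note (illustrative, not part of the original source):
   aggregate_value_p answers questions such as "must this function's
   value be returned in memory?".  For instance, assign_parms below
   tests

	aggregate_value_p (DECL_RESULT (fndecl))

   to decide whether to synthesize a hidden pointer parameter for the
   returned structure.  */
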
4308/* Assign RTL expressions to the function's parameters.
4309   This may involve copying them into registers and using
4310   those registers as the RTL for them.  */
4311
4312void
4313assign_parms (fndecl)
4314     tree fndecl;
4315{
4316  tree parm;
4317  rtx entry_parm = 0;
4318  rtx stack_parm = 0;
4319  CUMULATIVE_ARGS args_so_far;
4320  enum machine_mode promoted_mode, passed_mode;
4321  enum machine_mode nominal_mode, promoted_nominal_mode;
4322  int unsignedp;
4323  /* Total space needed so far for args on the stack,
4324     given as a constant and a tree-expression.  */
4325  struct args_size stack_args_size;
4326  tree fntype = TREE_TYPE (fndecl);
4327  tree fnargs = DECL_ARGUMENTS (fndecl);
4328  /* This is used for the arg pointer when referring to stack args.  */
4329  rtx internal_arg_pointer;
4330  /* This is a dummy PARM_DECL that we use for the function result if
4331     the function returns a structure.  */
4332  tree function_result_decl = 0;
4333#ifdef SETUP_INCOMING_VARARGS
4334  int varargs_setup = 0;
4335#endif
4336  rtx conversion_insns = 0;
4337  struct args_size alignment_pad;
4338
4339  /* Nonzero if function takes extra anonymous args.
4340     This means the last named arg must be on the stack
4341     right before the anonymous ones.  */
4342  int stdarg
4343    = (TYPE_ARG_TYPES (fntype) != 0
4344       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4345	   != void_type_node));
4346
4347  current_function_stdarg = stdarg;
4348
4349  /* If the reg that the virtual arg pointer will be translated into is
4350     not a fixed reg or is the stack pointer, make a copy of the virtual
4351     arg pointer, and address parms via the copy.  The frame pointer is
4352     considered fixed even though it is not marked as such.
4353
4354     The second time through, simply use ap to avoid generating rtx.  */
4355
4356  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4357       || ! (fixed_regs[ARG_POINTER_REGNUM]
4358	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4359    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4360  else
4361    internal_arg_pointer = virtual_incoming_args_rtx;
4362  current_function_internal_arg_pointer = internal_arg_pointer;
4363
4364  stack_args_size.constant = 0;
4365  stack_args_size.var = 0;
4366
4367  /* If struct value address is treated as the first argument, make it so.  */
4368  if (aggregate_value_p (DECL_RESULT (fndecl))
4369      && ! current_function_returns_pcc_struct
4370      && struct_value_incoming_rtx == 0)
4371    {
4372      tree type = build_pointer_type (TREE_TYPE (fntype));
4373
4374      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4375
4376      DECL_ARG_TYPE (function_result_decl) = type;
4377      TREE_CHAIN (function_result_decl) = fnargs;
4378      fnargs = function_result_decl;
4379    }
4380
4381  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4382  parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4383
4384#ifdef INIT_CUMULATIVE_INCOMING_ARGS
4385  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4386#else
4387  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4388#endif
4389
4390  /* We haven't yet found an argument that we must push and pretend the
4391     caller did.  */
4392  current_function_pretend_args_size = 0;
4393
4394  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4395    {
4396      struct args_size stack_offset;
4397      struct args_size arg_size;
4398      int passed_pointer = 0;
4399      int did_conversion = 0;
4400      tree passed_type = DECL_ARG_TYPE (parm);
4401      tree nominal_type = TREE_TYPE (parm);
4402      int pretend_named;
4403      int last_named = 0, named_arg;
4404
4405      /* Set LAST_NAMED if this is the last named arg before the
4406	 anonymous args.  */
4407      if (stdarg)
4408	{
4409	  tree tem;
4410
4411	  for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4412	    if (DECL_NAME (tem))
4413	      break;
4414
4415	  if (tem == 0)
4416	    last_named = 1;
4417	}
4418      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
4419	 most machines, if this is a varargs/stdarg function, then we treat
4420	 the last named arg as if it were anonymous too.  */
4421      named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4422
4423      if (TREE_TYPE (parm) == error_mark_node
4424	  /* This can happen after weird syntax errors
4425	     or if an enum type is defined among the parms.  */
4426	  || TREE_CODE (parm) != PARM_DECL
4427	  || passed_type == NULL)
4428	{
4429	  SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4430	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4431	  TREE_USED (parm) = 1;
4432	  continue;
4433	}
4434
4435      /* Find mode of arg as it is passed, and mode of arg
4436	 as it should be during execution of this function.  */
4437      passed_mode = TYPE_MODE (passed_type);
4438      nominal_mode = TYPE_MODE (nominal_type);
4439
4440      /* If the parm's mode is VOID, its value doesn't matter,
4441	 so avoid the usual things like emit_move_insn that could crash.  */
4442      if (nominal_mode == VOIDmode)
4443	{
4444	  SET_DECL_RTL (parm, const0_rtx);
4445	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4446	  continue;
4447	}
4448
4449      /* If the parm is to be passed as a transparent union, use the
4450	 type of the first field for the tests below.  We have already
4451	 verified that the modes are the same.  */
4452      if (DECL_TRANSPARENT_UNION (parm)
4453	  || (TREE_CODE (passed_type) == UNION_TYPE
4454	      && TYPE_TRANSPARENT_UNION (passed_type)))
4455	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4456
4457      /* See if this arg was passed by invisible reference.  It is if
4458	 it is an object whose size depends on the contents of the
4459	 object itself or if the machine requires these objects be passed
4460	 that way.  */
4461
4462      if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4463	   && contains_placeholder_p (TYPE_SIZE (passed_type)))
4464	  || TREE_ADDRESSABLE (passed_type)
4465#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4466	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4467					      passed_type, named_arg)
4468#endif
4469	  )
4470	{
4471	  passed_type = nominal_type = build_pointer_type (passed_type);
4472	  passed_pointer = 1;
4473	  passed_mode = nominal_mode = Pmode;
4474	}
4475      /* See if the frontend wants to pass this by invisible reference.  */
4476      else if (passed_type != nominal_type
4477	       && POINTER_TYPE_P (passed_type)
4478	       && TREE_TYPE (passed_type) == nominal_type)
4479	{
4480	  nominal_type = passed_type;
4481	  passed_pointer = 1;
4482	  passed_mode = nominal_mode = Pmode;
4483	}
4484
4485      promoted_mode = passed_mode;
4486
4487#ifdef PROMOTE_FUNCTION_ARGS
4488      /* Compute the mode in which the arg is actually extended to.  */
4489      unsignedp = TREE_UNSIGNED (passed_type);
4490      promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4491#endif
4492
4493      /* Let machine desc say which reg (if any) the parm arrives in.
4494	 0 means it arrives on the stack.  */
4495#ifdef FUNCTION_INCOMING_ARG
4496      entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4497					  passed_type, named_arg);
4498#else
4499      entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4500				 passed_type, named_arg);
4501#endif
4502
4503      if (entry_parm == 0)
4504	promoted_mode = passed_mode;
4505
4506#ifdef SETUP_INCOMING_VARARGS
4507      /* If this is the last named parameter, do any required setup for
4508	 varargs or stdargs.  We need to know about the case of this being an
4509	 addressable type, in which case we skip the registers it
4510	 would have arrived in.
4511
4512	 For stdargs, LAST_NAMED will be set for two parameters, the one that
4513	 is actually the last named, and the dummy parameter.  We only
4514	 want to do this action once.
4515
4516	 Also, indicate when RTL generation is to be suppressed.  */
4517      if (last_named && !varargs_setup)
4518	{
4519	  SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4520				  current_function_pretend_args_size, 0);
4521	  varargs_setup = 1;
4522	}
4523#endif
4524
4525      /* Determine parm's home in the stack,
4526	 in case it arrives in the stack or we should pretend it did.
4527
4528	 Compute the stack position and rtx where the argument arrives
4529	 and its size.
4530
4531	 There is one complexity here:  If this was a parameter that would
4532	 have been passed in registers, but wasn't only because it is
4533	 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4534	 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4535	 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4536	 0 as it was the previous time.  */
4537
4538      pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4539      locate_and_pad_parm (promoted_mode, passed_type,
4540#ifdef STACK_PARMS_IN_REG_PARM_AREA
4541			   1,
4542#else
4543#ifdef FUNCTION_INCOMING_ARG
4544			   FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4545						  passed_type,
4546						  pretend_named) != 0,
4547#else
4548			   FUNCTION_ARG (args_so_far, promoted_mode,
4549					 passed_type,
4550					 pretend_named) != 0,
4551#endif
4552#endif
4553			   fndecl, &stack_args_size, &stack_offset, &arg_size,
4554			   &alignment_pad);
4555
4556      {
4557	rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4558
4559	if (offset_rtx == const0_rtx)
4560	  stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4561	else
4562	  stack_parm = gen_rtx_MEM (promoted_mode,
4563				    gen_rtx_PLUS (Pmode,
4564						  internal_arg_pointer,
4565						  offset_rtx));
4566
4567	set_mem_attributes (stack_parm, parm, 1);
4568      }
4569
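      /* Editorial example (illustrative only): for an argument located
	 16 bytes into the incoming-argument area, the block above
	 yields

	     (mem (plus (reg internal-arg-pointer) (const_int 16)))

	 in PROMOTED_MODE, carrying the PARM_DECL's memory attributes.  */
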
4570      /* If this parameter was passed both in registers and in the stack,
4571	 use the copy on the stack.  */
4572      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4573	entry_parm = 0;
4574
4575#ifdef FUNCTION_ARG_PARTIAL_NREGS
4576      /* If this parm was passed part in regs and part in memory,
4577	 pretend it arrived entirely in memory
4578	 by pushing the register-part onto the stack.
4579
4580	 In the special case of a DImode or DFmode that is split,
4581	 we could put it together in a pseudoreg directly,
4582	 but for now that's not worth bothering with.  */
4583
4584      if (entry_parm)
4585	{
4586	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4587						  passed_type, named_arg);
4588
4589	  if (nregs > 0)
4590	    {
4591#if defined (REG_PARM_STACK_SPACE) && !defined (MAYBE_REG_PARM_STACK_SPACE)
4592	      /* When REG_PARM_STACK_SPACE is nonzero, stack space for
4593		 split parameters was allocated by our caller, so we
4594		 won't be pushing it in the prolog.  */
4595	      if (REG_PARM_STACK_SPACE (fndecl) == 0)
4596#endif
4597	      current_function_pretend_args_size
4598		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4599		   / (PARM_BOUNDARY / BITS_PER_UNIT)
4600		   * (PARM_BOUNDARY / BITS_PER_UNIT));
4601
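	      /* Editorial example (hypothetical values): with nregs == 3,
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, the
		 expression above rounds the 12 bytes of register-passed
		 data up to 16, the next multiple of the 8-byte parameter
		 boundary.  */
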
4602	      /* Handle calls that pass values in multiple non-contiguous
4603		 locations.  The Irix 6 ABI has examples of this.  */
4604	      if (GET_CODE (entry_parm) == PARALLEL)
4605		emit_group_store (validize_mem (stack_parm), entry_parm,
4606				  int_size_in_bytes (TREE_TYPE (parm)));
4607
4608	      else
4609		move_block_from_reg (REGNO (entry_parm),
4610				     validize_mem (stack_parm), nregs,
4611				     int_size_in_bytes (TREE_TYPE (parm)));
4612
4613	      entry_parm = stack_parm;
4614	    }
4615	}
4616#endif
4617
4618      /* If we didn't decide this parm came in a register,
4619	 by default it came on the stack.  */
4620      if (entry_parm == 0)
4621	entry_parm = stack_parm;
4622
4623      /* Record permanently how this parm was passed.  */
4624      DECL_INCOMING_RTL (parm) = entry_parm;
4625
4626      /* If there is actually space on the stack for this parm,
4627	 count it in stack_args_size; otherwise set stack_parm to 0
4628	 to indicate there is no preallocated stack slot for the parm.  */
4629
4630      if (entry_parm == stack_parm
4631	  || (GET_CODE (entry_parm) == PARALLEL
4632	      && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4633#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4634	  /* On some machines, even if a parm value arrives in a register
4635	     there is still an (uninitialized) stack slot allocated for it.
4636
4637	     ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4638	     whether this parameter already has a stack slot allocated,
4639	     because an arg block exists only if current_function_args_size
4640	     is larger than some threshold, and we haven't calculated that
4641	     yet.  So, for now, we just assume that stack slots never exist
4642	     in this case.  */
4643	  || REG_PARM_STACK_SPACE (fndecl) > 0
4644#endif
4645	  )
4646	{
4647	  stack_args_size.constant += arg_size.constant;
4648	  if (arg_size.var)
4649	    ADD_PARM_SIZE (stack_args_size, arg_size.var);
4650	}
4651      else
4652	/* No stack slot was pushed for this parm.  */
4653	stack_parm = 0;
4654
4655      /* Update info on where next arg arrives in registers.  */
4656
4657      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4658			    passed_type, named_arg);
4659
4660      /* If we can't trust the parm stack slot to be aligned enough
4661	 for its ultimate type, don't use that slot after entry.
4662	 We'll make another stack slot, if we need one.  */
4663      {
4664	unsigned int thisparm_boundary
4665	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4666
4667	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4668	  stack_parm = 0;
4669      }
4670
4671      /* If parm was passed in memory, and we need to convert it on entry,
4672	 don't store it back in that same slot.  */
4673      if (entry_parm != 0
4674	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
4675	stack_parm = 0;
4676
4677      /* When an argument is passed in multiple locations, we can't
4678	 make use of this information, but we can save some copying if
4679	 the whole argument is passed in a single register.  */
4680      if (GET_CODE (entry_parm) == PARALLEL
4681	  && nominal_mode != BLKmode && passed_mode != BLKmode)
4682	{
4683	  int i, len = XVECLEN (entry_parm, 0);
4684
4685	  for (i = 0; i < len; i++)
4686	    if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4687		&& GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4688		&& (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4689		    == passed_mode)
4690		&& INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4691	      {
4692		entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4693		DECL_INCOMING_RTL (parm) = entry_parm;
4694		break;
4695	      }
4696	}
4697
4698      /* ENTRY_PARM is an RTX for the parameter as it arrives,
4699	 in the mode in which it arrives.
4700	 STACK_PARM is an RTX for a stack slot where the parameter can live
4701	 during the function (in case we want to put it there).
4702	 STACK_PARM is 0 if no stack slot was pushed for it.
4703
4704	 Now output code if necessary to convert ENTRY_PARM to
4705	 the type in which this function declares it,
4706	 and store that result in an appropriate place,
4707	 which may be a pseudo reg, may be STACK_PARM,
4708	 or may be a local stack slot if STACK_PARM is 0.
4709
4710	 Set DECL_RTL to that place.  */
4711
4712      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4713	{
4714	  /* If a BLKmode arrives in registers, copy it to a stack slot.
4715	     Handle calls that pass values in multiple non-contiguous
4716	     locations.  The Irix 6 ABI has examples of this.  */
4717	  if (GET_CODE (entry_parm) == REG
4718	      || GET_CODE (entry_parm) == PARALLEL)
4719	    {
4720	      int size_stored
4721		= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4722			      UNITS_PER_WORD);
4723
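	      /* Editorial example (hypothetical sizes): a 10-byte BLKmode
		 parameter on a target with 4-byte words gives
		 SIZE_STORED == CEIL_ROUND (10, 4) == 12, i.e. three full
		 words are copied.  */
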
4724	      /* Note that we will be storing an integral number of words.
4725		 So we have to be careful to ensure that we allocate an
4726		 integral number of words.  We do this below in the
4727		 assign_stack_local if space was not allocated in the argument
4728		 list.  If it was, this will not work if PARM_BOUNDARY is not
4729		 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
4730		 if it becomes a problem.  */
4731
4732	      if (stack_parm == 0)
4733		{
4734		  stack_parm
4735		    = assign_stack_local (GET_MODE (entry_parm),
4736					  size_stored, 0);
4737		  set_mem_attributes (stack_parm, parm, 1);
4738		}
4739
4740	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4741		abort ();
4742
4743	      /* Handle calls that pass values in multiple non-contiguous
4744		 locations.  The Irix 6 ABI has examples of this.  */
4745	      if (GET_CODE (entry_parm) == PARALLEL)
4746		emit_group_store (validize_mem (stack_parm), entry_parm,
4747				  int_size_in_bytes (TREE_TYPE (parm)));
4748	      else
4749		move_block_from_reg (REGNO (entry_parm),
4750				     validize_mem (stack_parm),
4751				     size_stored / UNITS_PER_WORD,
4752				     int_size_in_bytes (TREE_TYPE (parm)));
4753	    }
4754	  SET_DECL_RTL (parm, stack_parm);
4755	}
4756      else if (! ((! optimize
4757		   && ! DECL_REGISTER (parm))
4758		  || TREE_SIDE_EFFECTS (parm)
4759		  /* If -ffloat-store specified, don't put explicit
4760		     float variables into registers.  */
4761		  || (flag_float_store
4762		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4763	       /* Always assign pseudo to structure return or item passed
4764		  by invisible reference.  */
4765	       || passed_pointer || parm == function_result_decl)
4766	{
4767	  /* Store the parm in a pseudoregister during the function, but we
4768	     may need to do it in a wider mode.  */
4769
4770	  rtx parmreg;
4771	  unsigned int regno, regnoi = 0, regnor = 0;
4772
4773	  unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4774
4775	  promoted_nominal_mode
4776	    = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4777
4778	  parmreg = gen_reg_rtx (promoted_nominal_mode);
4779	  mark_user_reg (parmreg);
4780
4781	  /* If this was an item that we received a pointer to, set DECL_RTL
4782	     appropriately.  */
4783	  if (passed_pointer)
4784	    {
4785	      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4786				   parmreg);
4787	      set_mem_attributes (x, parm, 1);
4788	      SET_DECL_RTL (parm, x);
4789	    }
4790	  else
4791	    {
4792	      SET_DECL_RTL (parm, parmreg);
4793	      maybe_set_unchanging (DECL_RTL (parm), parm);
4794	    }
4795
4796	  /* Copy the value into the register.  */
4797	  if (nominal_mode != passed_mode
4798	      || promoted_nominal_mode != promoted_mode)
4799	    {
4800	      int save_tree_used;
4801	      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4802		 mode, by the caller.  We now have to convert it to
4803		 NOMINAL_MODE, if different.  However, PARMREG may be in
4804		 a different mode than NOMINAL_MODE if it is being stored
4805		 promoted.
4806
4807		 If ENTRY_PARM is a hard register, it might be in a register
4808		 not valid for operating in its mode (e.g., an odd-numbered
4809		 register for a DFmode).  In that case, moves are the only
4810		 thing valid, so we can't do a convert from there.  This
4811		 occurs when the calling sequence allow such misaligned
4812		 usages.
4813
4814		 In addition, the conversion may involve a call, which could
4815		 clobber parameters which haven't been copied to pseudo
4816		 registers yet.  Therefore, we must first copy the parm to
4817		 a pseudo reg here, and save the conversion until after all
4818		 parameters have been moved.  */
4819
4820	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4821
4822	      emit_move_insn (tempreg, validize_mem (entry_parm));
4823
4824	      push_to_sequence (conversion_insns);
4825	      tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4826
4827	      if (GET_CODE (tempreg) == SUBREG
4828		  && GET_MODE (tempreg) == nominal_mode
4829		  && GET_CODE (SUBREG_REG (tempreg)) == REG
4830		  && nominal_mode == passed_mode
4831		  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4832		  && GET_MODE_SIZE (GET_MODE (tempreg))
4833		     < GET_MODE_SIZE (GET_MODE (entry_parm)))
4834		{
4835		  /* The argument is already sign/zero extended, so note it
4836		     into the subreg.  */
4837		  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4838		  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4839		}
4840
4841	      /* TREE_USED gets set erroneously during expand_assignment.  */
4842	      save_tree_used = TREE_USED (parm);
4843	      expand_assignment (parm,
4844				 make_tree (nominal_type, tempreg), 0, 0);
4845	      TREE_USED (parm) = save_tree_used;
4846	      conversion_insns = get_insns ();
4847	      did_conversion = 1;
4848	      end_sequence ();
4849	    }
4850	  else
4851	    emit_move_insn (parmreg, validize_mem (entry_parm));
4852
4853	  /* If we were passed a pointer but the actual value
4854	     can safely live in a register, put it in one.  */
4855	  if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4856	      /* If by-reference argument was promoted, demote it.  */
4857	      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4858		  || ! ((! optimize
4859			 && ! DECL_REGISTER (parm))
4860			|| TREE_SIDE_EFFECTS (parm)
4861			/* If -ffloat-store specified, don't put explicit
4862			   float variables into registers.  */
4863			|| (flag_float_store
4864			    && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4865	    {
4866	      /* We can't use nominal_mode, because it will have been set to
4867		 Pmode above.  We must use the actual mode of the parm.  */
4868	      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4869	      mark_user_reg (parmreg);
4870	      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4871		{
4872		  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4873		  int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4874		  push_to_sequence (conversion_insns);
4875		  emit_move_insn (tempreg, DECL_RTL (parm));
4876		  SET_DECL_RTL (parm,
4877				convert_to_mode (GET_MODE (parmreg),
4878						 tempreg,
4879						 unsigned_p));
4880		  emit_move_insn (parmreg, DECL_RTL (parm));
4881		  conversion_insns = get_insns();
4882		  did_conversion = 1;
4883		  end_sequence ();
4884		}
4885	      else
4886		emit_move_insn (parmreg, DECL_RTL (parm));
4887	      SET_DECL_RTL (parm, parmreg);
4888	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
4889		 now the parm.  */
4890	      stack_parm = 0;
4891	    }
4892#ifdef FUNCTION_ARG_CALLEE_COPIES
4893	  /* If we are passed an arg by reference and it is our responsibility
4894	     to make a copy, do it now.
4895	     PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4896	     original argument, so we must recreate them in the call to
4897	     FUNCTION_ARG_CALLEE_COPIES.  */
4898	  /* ??? Later add code to skip the copy if the argument isn't
4899	     modified.  */
4900
4901	  else if (passed_pointer
4902		   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4903						  TYPE_MODE (DECL_ARG_TYPE (parm)),
4904						  DECL_ARG_TYPE (parm),
4905						  named_arg)
4906		   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4907	    {
4908	      rtx copy;
4909	      tree type = DECL_ARG_TYPE (parm);
4910
4911	      /* This sequence may involve a library call, perhaps clobbering
4912		 registers that haven't been copied to pseudos yet.  */
4913
4914	      push_to_sequence (conversion_insns);
4915
4916	      if (!COMPLETE_TYPE_P (type)
4917		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4918		/* This is a variable-sized object.  */
4919		copy = gen_rtx_MEM (BLKmode,
4920				    allocate_dynamic_stack_space
4921				    (expr_size (parm), NULL_RTX,
4922				     TYPE_ALIGN (type)));
4923	      else
4924		copy = assign_stack_temp (TYPE_MODE (type),
4925					  int_size_in_bytes (type), 1);
4926	      set_mem_attributes (copy, parm, 1);
4927
4928	      store_expr (parm, copy, 0);
4929	      emit_move_insn (parmreg, XEXP (copy, 0));
4930	      conversion_insns = get_insns ();
4931	      did_conversion = 1;
4932	      end_sequence ();
4933	    }
4934#endif /* FUNCTION_ARG_CALLEE_COPIES */
4935
4936	  /* In any case, record the parm's desired stack location
4937	     in case we later discover it must live in the stack.
4938
4939	     If it is a COMPLEX value, store the stack location for both
4940	     halves.  */
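	  /* Illustrative example: an SCmode parm arrives as a CONCAT of two
	     SFmode pseudos; gen_realpart and gen_imagpart below record the
	     matching half of the stack slot for each of them.  */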
4941
4942	  if (GET_CODE (parmreg) == CONCAT)
4943	    regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4944	  else
4945	    regno = REGNO (parmreg);
4946
4947	  if (regno >= max_parm_reg)
4948	    {
4949	      rtx *new;
4950	      int old_max_parm_reg = max_parm_reg;
4951
4952	      /* It's slow to expand this one register at a time,
4953		 but it's also rare and we need max_parm_reg to be
4954		 precisely correct.  */
4955	      max_parm_reg = regno + 1;
4956	      new = (rtx *) ggc_realloc (parm_reg_stack_loc,
4957				      max_parm_reg * sizeof (rtx));
4958	      memset ((char *) (new + old_max_parm_reg), 0,
4959		     (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4960	      parm_reg_stack_loc = new;
4961	    }
4962
4963	  if (GET_CODE (parmreg) == CONCAT)
4964	    {
4965	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4966
4967	      regnor = REGNO (gen_realpart (submode, parmreg));
4968	      regnoi = REGNO (gen_imagpart (submode, parmreg));
4969
4970	      if (stack_parm != 0)
4971		{
4972		  parm_reg_stack_loc[regnor]
4973		    = gen_realpart (submode, stack_parm);
4974		  parm_reg_stack_loc[regnoi]
4975		    = gen_imagpart (submode, stack_parm);
4976		}
4977	      else
4978		{
4979		  parm_reg_stack_loc[regnor] = 0;
4980		  parm_reg_stack_loc[regnoi] = 0;
4981		}
4982	    }
4983	  else
4984	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4985
4986	  /* Mark the register as eliminable if we did no conversion
4987	     and it was copied from memory at a fixed offset,
4988	     and the arg pointer was not copied to a pseudo-reg.
4989	     If the arg pointer is a pseudo reg or the offset formed
4990	     an invalid address, such memory-equivalences
4991	     as we make here would screw up life analysis for it.  */
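	  /* Sketch of the note added below (illustrative RTL only):
	       (expr_list:REG_EQUIV
		  (mem:SI (plus (reg virtual-incoming-args) (const_int 4)))
		  ...)
	     which records that the pseudo is always equivalent to its
	     incoming argument slot.  */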
4992	  if (nominal_mode == passed_mode
4993	      && ! did_conversion
4994	      && stack_parm != 0
4995	      && GET_CODE (stack_parm) == MEM
4996	      && stack_offset.var == 0
4997	      && reg_mentioned_p (virtual_incoming_args_rtx,
4998				  XEXP (stack_parm, 0)))
4999	    {
5000	      rtx linsn = get_last_insn ();
5001	      rtx sinsn, set;
5002
5003	      /* Mark complex types separately.  */
5004	      if (GET_CODE (parmreg) == CONCAT)
5005		/* Scan backwards for the set of the real and
5006		   imaginary parts.  */
5007		for (sinsn = linsn; sinsn != 0;
5008		     sinsn = prev_nonnote_insn (sinsn))
5009		  {
5010		    set = single_set (sinsn);
5011		    if (set != 0
5012			&& SET_DEST (set) == regno_reg_rtx [regnoi])
5013		      REG_NOTES (sinsn)
5014			= gen_rtx_EXPR_LIST (REG_EQUIV,
5015					     parm_reg_stack_loc[regnoi],
5016					     REG_NOTES (sinsn));
5017		    else if (set != 0
5018			     && SET_DEST (set) == regno_reg_rtx [regnor])
5019		      REG_NOTES (sinsn)
5020			= gen_rtx_EXPR_LIST (REG_EQUIV,
5021					     parm_reg_stack_loc[regnor],
5022					     REG_NOTES (sinsn));
5023		  }
5024	      else if ((set = single_set (linsn)) != 0
5025		       && SET_DEST (set) == parmreg)
5026		REG_NOTES (linsn)
5027		  = gen_rtx_EXPR_LIST (REG_EQUIV,
5028				       stack_parm, REG_NOTES (linsn));
5029	    }
5030
5031	  /* For a pointer data type, suggest a pointer register.  */
5032	  if (POINTER_TYPE_P (TREE_TYPE (parm)))
5033	    mark_reg_pointer (parmreg,
5034			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5035
5036	  /* If something wants our address, try to use ADDRESSOF.  */
5037	  if (TREE_ADDRESSABLE (parm))
5038	    {
5039	      /* If we end up putting something into the stack,
5040		 fixup_var_refs_insns will need to make a pass over
5041		 all the instructions.  It looks through the pending
5042		 sequences -- but it can't see the ones in the
5043		 CONVERSION_INSNS, if they're not on the sequence
5044		 stack.  So, we go back to that sequence, just so that
5045		 the fixups will happen.  */
5046	      push_to_sequence (conversion_insns);
5047	      put_var_into_stack (parm, /*rescan=*/true);
5048	      conversion_insns = get_insns ();
5049	      end_sequence ();
5050	    }
5051	}
5052      else
5053	{
5054	  /* Value must be stored in the stack slot STACK_PARM
5055	     during function execution.  */
5056
5057	  if (promoted_mode != nominal_mode)
5058	    {
5059	      /* Conversion is required.  */
5060	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5061
5062	      emit_move_insn (tempreg, validize_mem (entry_parm));
5063
5064	      push_to_sequence (conversion_insns);
5065	      entry_parm = convert_to_mode (nominal_mode, tempreg,
5066					    TREE_UNSIGNED (TREE_TYPE (parm)));
5067	      if (stack_parm)
5068		/* ??? This may need a big-endian conversion on sparc64.  */
5069		stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5070
5071	      conversion_insns = get_insns ();
5072	      did_conversion = 1;
5073	      end_sequence ();
5074	    }
5075
5076	  if (entry_parm != stack_parm)
5077	    {
5078	      if (stack_parm == 0)
5079		{
5080		  stack_parm
5081		    = assign_stack_local (GET_MODE (entry_parm),
5082					  GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5083		  set_mem_attributes (stack_parm, parm, 1);
5084		}
5085
5086	      if (promoted_mode != nominal_mode)
5087		{
5088		  push_to_sequence (conversion_insns);
5089		  emit_move_insn (validize_mem (stack_parm),
5090				  validize_mem (entry_parm));
5091		  conversion_insns = get_insns ();
5092		  end_sequence ();
5093		}
5094	      else
5095		emit_move_insn (validize_mem (stack_parm),
5096				validize_mem (entry_parm));
5097	    }
5098
5099	  SET_DECL_RTL (parm, stack_parm);
5100	}
5101
5102      /* If this "parameter" was the place where we are receiving the
5103	 function's incoming structure pointer, set up the result.  */
5104      if (parm == function_result_decl)
5105	{
5106	  tree result = DECL_RESULT (fndecl);
5107	  rtx addr = DECL_RTL (parm);
5108	  rtx x;
5109
5110#ifdef POINTERS_EXTEND_UNSIGNED
5111	  if (GET_MODE (addr) != Pmode)
5112	    addr = convert_memory_address (Pmode, addr);
5113#endif
5114
5115	  x = gen_rtx_MEM (DECL_MODE (result), addr);
5116	  set_mem_attributes (x, result, 1);
5117	  SET_DECL_RTL (result, x);
5118	}
5119
5120      if (GET_CODE (DECL_RTL (parm)) == REG)
5121	REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5122      else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5123	{
5124	  REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5125	  REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5126	}
5127
5128    }
5129
5130  /* Output all parameter conversion instructions (possibly including calls)
5131     now that all parameters have been copied out of hard registers.  */
5132  emit_insn (conversion_insns);
5133
5134  last_parm_insn = get_last_insn ();
5135
5136  current_function_args_size = stack_args_size.constant;
5137
5138  /* Adjust function incoming argument size for alignment and
5139     minimum length.  */
5140
5141#ifdef REG_PARM_STACK_SPACE
5142#ifndef MAYBE_REG_PARM_STACK_SPACE
5143  current_function_args_size = MAX (current_function_args_size,
5144				    REG_PARM_STACK_SPACE (fndecl));
5145#endif
5146#endif
5147
5148#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5149
5150  current_function_args_size
5151    = ((current_function_args_size + STACK_BYTES - 1)
5152       / STACK_BYTES) * STACK_BYTES;
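  /* For example (illustrative numbers): with STACK_BOUNDARY == 64,
     STACK_BYTES is 8, so an args size of 20 bytes is rounded up to 24.  */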
5153
5154#ifdef ARGS_GROW_DOWNWARD
5155  current_function_arg_offset_rtx
5156    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5157       : expand_expr (size_diffop (stack_args_size.var,
5158				   size_int (-stack_args_size.constant)),
5159		      NULL_RTX, VOIDmode, 0));
5160#else
5161  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5162#endif
5163
5164  /* See how many bytes, if any, of its args a function should try to pop
5165     on return.  */
5166
5167  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5168						 current_function_args_size);
5169
5170	  /* For a stdarg.h function, save info about
5171	     the regs and stack space used by the named args.  */
5172
5173  current_function_args_info = args_so_far;
5174
5175  /* Set the rtx used for the function return value.  Put this in its
5176     own variable so any optimizers that need this information don't have
5177     to include tree.h.  Do this here so it gets done when an inlined
5178     function gets output.  */
5179
5180  current_function_return_rtx
5181    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5182       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5183
5184  /* If scalar return value was computed in a pseudo-reg, or was a named
5185     return value that got dumped to the stack, copy that to the hard
5186     return register.  */
5187  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5188    {
5189      tree decl_result = DECL_RESULT (fndecl);
5190      rtx decl_rtl = DECL_RTL (decl_result);
5191
5192      if (REG_P (decl_rtl)
5193	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5194	  : DECL_REGISTER (decl_result))
5195	{
5196	  rtx real_decl_rtl;
5197
5198#ifdef FUNCTION_OUTGOING_VALUE
5199	  real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5200						   fndecl);
5201#else
5202	  real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5203					  fndecl);
5204#endif
5205	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5206	  /* The delay slot scheduler assumes that current_function_return_rtx
5207	     holds the hard register containing the return value, not a
5208	     temporary pseudo.  */
5209	  current_function_return_rtx = real_decl_rtl;
5210	}
5211    }
5212}
5213
5214/* Indicate whether REGNO is an incoming argument to the current function
5215   that was promoted to a wider mode.  If so, return the RTX for the
5216   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
5217   that REGNO is promoted from and whether the promotion was signed or
5218   unsigned.  */
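/* A caller might use it as follows (illustrative sketch only):

     enum machine_mode mode;
     int unsignedp;
     rtx reg = promoted_input_arg (REGNO (x), &mode, &unsignedp);
     if (reg != 0)
       ... X is known to be sign- or zero-extended from MODE ...  */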
5219
5220#ifdef PROMOTE_FUNCTION_ARGS
5221
5222rtx
5223promoted_input_arg (regno, pmode, punsignedp)
5224     unsigned int regno;
5225     enum machine_mode *pmode;
5226     int *punsignedp;
5227{
5228  tree arg;
5229
5230  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5231       arg = TREE_CHAIN (arg))
5232    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5233	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
5234	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5235      {
5236	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5237	int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5238
5239	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5240	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5241	    && mode != DECL_MODE (arg))
5242	  {
5243	    *pmode = DECL_MODE (arg);
5244	    *punsignedp = unsignedp;
5245	    return DECL_INCOMING_RTL (arg);
5246	  }
5247      }
5248
5249  return 0;
5250}
5251
5252#endif
5253
5254/* Compute the size and offset from the start of the stacked arguments for a
5255   parm passed in mode PASSED_MODE and with type TYPE.
5256
5257   INITIAL_OFFSET_PTR points to the current offset into the stacked
5258   arguments.
5259
5260   The starting offset and size for this parm are returned in *OFFSET_PTR
5261   and *ARG_SIZE_PTR, respectively.
5262
5263   IN_REGS is nonzero if the argument will be passed in registers.  It will
5264   never be set if REG_PARM_STACK_SPACE is not defined.
5265
5266   FNDECL is the function in which the argument was defined.
5267
5268   There are two types of rounding that are done.  The first, controlled by
5269   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5270   list to be aligned to the specified boundary (in bits).  This rounding
5271   affects the initial and starting offsets, but not the argument size.
5272
5273   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5274   optionally rounds the size of the parm to PARM_BOUNDARY.  The
5275   initial offset is not affected by this rounding, while the size always
5276   is and the starting offset may be.  */
5277
5278/* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5279    initial_offset_ptr is positive because locate_and_pad_parm's
5280    callers pass in the total size of args so far as
5281    initial_offset_ptr. arg_size_ptr is always positive.  */
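/* Worked example (hypothetical target, args growing upward): a 5-byte
   BLKmode parm with FUNCTION_ARG_BOUNDARY == 32 and PARM_BOUNDARY == 32,
   starting at offset 2, first has its offset rounded up to 4; its size is
   then rounded from 5 up to 8, so the next parm starts at offset 12.  */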
5282
5283void
5284locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5285		     initial_offset_ptr, offset_ptr, arg_size_ptr,
5286		     alignment_pad)
5287     enum machine_mode passed_mode;
5288     tree type;
5289     int in_regs ATTRIBUTE_UNUSED;
5290     tree fndecl ATTRIBUTE_UNUSED;
5291     struct args_size *initial_offset_ptr;
5292     struct args_size *offset_ptr;
5293     struct args_size *arg_size_ptr;
5294     struct args_size *alignment_pad;
5295
5296{
5297  tree sizetree
5298    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5299  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5300  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5301#ifdef ARGS_GROW_DOWNWARD
5302  tree s2 = sizetree;
5303#endif
5304
5305#ifdef REG_PARM_STACK_SPACE
5306  /* If we have found a stack parm before we reach the end of the
5307     area reserved for registers, skip that area.  */
5308  if (! in_regs)
5309    {
5310      int reg_parm_stack_space = 0;
5311
5312#ifdef MAYBE_REG_PARM_STACK_SPACE
5313      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5314#else
5315      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5316#endif
5317      if (reg_parm_stack_space > 0)
5318	{
5319	  if (initial_offset_ptr->var)
5320	    {
5321	      initial_offset_ptr->var
5322		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5323			      ssize_int (reg_parm_stack_space));
5324	      initial_offset_ptr->constant = 0;
5325	    }
5326	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
5327	    initial_offset_ptr->constant = reg_parm_stack_space;
5328	}
5329    }
5330#endif /* REG_PARM_STACK_SPACE */
5331
5332  arg_size_ptr->var = 0;
5333  arg_size_ptr->constant = 0;
5334  alignment_pad->var = 0;
5335  alignment_pad->constant = 0;
5336
5337#ifdef ARGS_GROW_DOWNWARD
5338  if (initial_offset_ptr->var)
5339    {
5340      offset_ptr->constant = 0;
5341      offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5342				    initial_offset_ptr->var);
5343    }
5344  else
5345    {
5346      offset_ptr->constant = -initial_offset_ptr->constant;
5347      offset_ptr->var = 0;
5348    }
5349
5350  if (where_pad != none
5351      && (!host_integerp (sizetree, 1)
5352	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5353    s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
5354  SUB_PARM_SIZE (*offset_ptr, s2);
5355
5356  if (!in_regs
5357#ifdef REG_PARM_STACK_SPACE
5358      || REG_PARM_STACK_SPACE (fndecl) > 0
5359#endif
5360     )
5361    pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5362
5363  if (initial_offset_ptr->var)
5364    arg_size_ptr->var = size_binop (MINUS_EXPR,
5365				    size_binop (MINUS_EXPR,
5366						ssize_int (0),
5367						initial_offset_ptr->var),
5368				    offset_ptr->var);
5369
5370  else
5371    arg_size_ptr->constant = (-initial_offset_ptr->constant
5372			      - offset_ptr->constant);
5373
5374  /* Pad_below needs the pre-rounded size to know how much to pad below.
5375     We only pad parameters that are not in registers, as those have
5376     their padding done elsewhere.  */
5377  if (where_pad == downward
5378      && !in_regs)
5379    pad_below (offset_ptr, passed_mode, sizetree);
5380
5381#else /* !ARGS_GROW_DOWNWARD */
5382  if (!in_regs
5383#ifdef REG_PARM_STACK_SPACE
5384      || REG_PARM_STACK_SPACE (fndecl) > 0
5385#endif
5386      )
5387    pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5388  *offset_ptr = *initial_offset_ptr;
5389
5390#ifdef PUSH_ROUNDING
5391  if (passed_mode != BLKmode)
5392    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5393#endif
5394
5395  /* Pad_below needs the pre-rounded size to know how much to pad below,
5396     so this must be done before rounding up.  */
5397  if (where_pad == downward
5398    /* However, BLKmode args passed in regs have their padding done elsewhere.
5399       The stack slot must be able to hold the entire register.  */
5400      && !(in_regs && passed_mode == BLKmode))
5401    pad_below (offset_ptr, passed_mode, sizetree);
5402
5403  if (where_pad != none
5404      && (!host_integerp (sizetree, 1)
5405	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5406    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5407
5408  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5409#endif /* ARGS_GROW_DOWNWARD */
5410}
5411
5412/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5413   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
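/* E.g. (illustrative): with BOUNDARY == 64 bits and a constant offset of
   12, CEIL_ROUND yields (12 + 7) & ~7 == 16 (when ARGS_GROW_DOWNWARD the
   offset is instead rounded down with FLOOR_ROUND).  If BOUNDARY also
   exceeded PARM_BOUNDARY and STACK_BOUNDARY, ALIGNMENT_PAD would record
   the 4 bytes of padding just introduced.  */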
5414
5415static void
5416pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5417     struct args_size *offset_ptr;
5418     int boundary;
5419     struct args_size *alignment_pad;
5420{
5421  tree save_var = NULL_TREE;
5422  HOST_WIDE_INT save_constant = 0;
5423
5424  int boundary_in_bytes = boundary / BITS_PER_UNIT;
5425
5426  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5427    {
5428      save_var = offset_ptr->var;
5429      save_constant = offset_ptr->constant;
5430    }
5431
5432  alignment_pad->var = NULL_TREE;
5433  alignment_pad->constant = 0;
5434
5435  if (boundary > BITS_PER_UNIT)
5436    {
5437      if (offset_ptr->var)
5438	{
5439	  offset_ptr->var =
5440#ifdef ARGS_GROW_DOWNWARD
5441	    round_down
5442#else
5443	    round_up
5444#endif
5445	      (ARGS_SIZE_TREE (*offset_ptr),
5446	       boundary / BITS_PER_UNIT);
5447	  offset_ptr->constant = 0; /*?*/
5448	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5449	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5450					     save_var);
5451	}
5452      else
5453	{
5454	  offset_ptr->constant =
5455#ifdef ARGS_GROW_DOWNWARD
5456	    FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5457#else
5458	    CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5459#endif
5460	    if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5461	      alignment_pad->constant = offset_ptr->constant - save_constant;
5462	}
5463    }
5464}
5465
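/* Adjust *OFFSET_PTR for the padding below a downward-padded parm.
   Illustrative example (numbers are hypothetical): with PARM_BOUNDARY
   == 32, an HImode (2-byte) parm gets
   (16 + 31) / 32 * 32 / 8 - 2 == 2 bytes added to its offset.  */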
5466static void
5467pad_below (offset_ptr, passed_mode, sizetree)
5468     struct args_size *offset_ptr;
5469     enum machine_mode passed_mode;
5470     tree sizetree;
5471{
5472  if (passed_mode != BLKmode)
5473    {
5474      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5475	offset_ptr->constant
5476	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5477	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5478	      - GET_MODE_SIZE (passed_mode));
5479    }
5480  else
5481    {
5482      if (TREE_CODE (sizetree) != INTEGER_CST
5483	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5484	{
5485	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
5486	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5487	  /* Add it in.  */
5488	  ADD_PARM_SIZE (*offset_ptr, s2);
5489	  SUB_PARM_SIZE (*offset_ptr, sizetree);
5490	}
5491    }
5492}
5493
5494/* Walk the tree of blocks describing the binding levels within a function
5495   and warn about uninitialized variables.
5496   This is done after calling flow_analysis and before global_alloc
5497   replaces the pseudo-regs with hard regs.  */
5498
5499void
5500uninitialized_vars_warning (block)
5501     tree block;
5502{
5503  tree decl, sub;
5504  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5505    {
5506      if (warn_uninitialized
5507	  && TREE_CODE (decl) == VAR_DECL
5508	  /* These warnings are unreliable for aggregates
5509	     because assigning the fields one by one can fail to convince
5510	     flow.c that the entire aggregate was initialized.
5511	     Unions are troublesome because members may be shorter.  */
5512	  && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5513	  && DECL_RTL (decl) != 0
5514	  && GET_CODE (DECL_RTL (decl)) == REG
5515	  /* Global optimizations can make it difficult to determine if a
5516	     particular variable has been initialized.  However, a VAR_DECL
5517	     with a nonzero DECL_INITIAL had an initializer, so do not
5518	     claim it is potentially uninitialized.
5519
5520	     We do not care about the actual value in DECL_INITIAL, so we do
5521	     not worry that it may be a dangling pointer.  */
5522	  && DECL_INITIAL (decl) == NULL_TREE
5523	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
5524	warning_with_decl (decl,
5525			   "`%s' might be used uninitialized in this function");
5526      if (extra_warnings
5527	  && TREE_CODE (decl) == VAR_DECL
5528	  && DECL_RTL (decl) != 0
5529	  && GET_CODE (DECL_RTL (decl)) == REG
5530	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5531	warning_with_decl (decl,
5532			   "variable `%s' might be clobbered by `longjmp' or `vfork'");
5533    }
5534  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5535    uninitialized_vars_warning (sub);
5536}
5537
5538/* Do the appropriate part of uninitialized_vars_warning
5539   but for arguments instead of local variables.  */
5540
5541void
5542setjmp_args_warning ()
5543{
5544  tree decl;
5545  for (decl = DECL_ARGUMENTS (current_function_decl);
5546       decl; decl = TREE_CHAIN (decl))
5547    if (DECL_RTL (decl) != 0
5548	&& GET_CODE (DECL_RTL (decl)) == REG
5549	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5550      warning_with_decl (decl,
5551			 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5552}
5553
5554/* If this function calls setjmp, put all vars into the stack
5555   unless they were declared `register'.  */
5556
5557void
5558setjmp_protect (block)
5559     tree block;
5560{
5561  tree decl, sub;
5562  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5563    if ((TREE_CODE (decl) == VAR_DECL
5564	 || TREE_CODE (decl) == PARM_DECL)
5565	&& DECL_RTL (decl) != 0
5566	&& (GET_CODE (DECL_RTL (decl)) == REG
5567	    || (GET_CODE (DECL_RTL (decl)) == MEM
5568		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5569	/* If this variable came from an inline function, it must be
5570	   that its life doesn't overlap the setjmp.  If there was a
5571	   setjmp in the function, it would already be in memory.  We
5572	   must exclude such variables because their DECL_RTL might be
5573	   set to strange things such as virtual_stack_vars_rtx.  */
5574	&& ! DECL_FROM_INLINE (decl)
5575	&& (
5576#ifdef NON_SAVING_SETJMP
5577	    /* If longjmp doesn't restore the registers,
5578	       don't put anything in them.  */
5579	    NON_SAVING_SETJMP
5580	    ||
5581#endif
5582	    ! DECL_REGISTER (decl)))
5583      put_var_into_stack (decl, /*rescan=*/true);
5584  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5585    setjmp_protect (sub);
5586}
5587
5588/* Like the previous function, but for args instead of local variables.  */
5589
5590void
5591setjmp_protect_args ()
5592{
5593  tree decl;
5594  for (decl = DECL_ARGUMENTS (current_function_decl);
5595       decl; decl = TREE_CHAIN (decl))
5596    if ((TREE_CODE (decl) == VAR_DECL
5597	 || TREE_CODE (decl) == PARM_DECL)
5598	&& DECL_RTL (decl) != 0
5599	&& (GET_CODE (DECL_RTL (decl)) == REG
5600	    || (GET_CODE (DECL_RTL (decl)) == MEM
5601		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5602	&& (
5603	    /* If longjmp doesn't restore the registers,
5604	       don't put anything in them.  */
5605#ifdef NON_SAVING_SETJMP
5606	    NON_SAVING_SETJMP
5607	    ||
5608#endif
5609	    ! DECL_REGISTER (decl)))
5610      put_var_into_stack (decl, /*rescan=*/true);
5611}
5612
5613/* Return the context-pointer register corresponding to DECL,
5614   or 0 if it does not need one.  */
5615
5616rtx
5617lookup_static_chain (decl)
5618     tree decl;
5619{
5620  tree context = decl_function_context (decl);
5621  tree link;
5622
5623  if (context == 0
5624      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5625    return 0;
5626
5627  /* We treat inline_function_decl as an alias for the current function
5628     because that is the inline function whose vars, types, etc.
5629     are being merged into the current function.
5630     See expand_inline_function.  */
5631  if (context == current_function_decl || context == inline_function_decl)
5632    return virtual_stack_vars_rtx;
5633
5634  for (link = context_display; link; link = TREE_CHAIN (link))
5635    if (TREE_PURPOSE (link) == context)
5636      return RTL_EXPR_RTL (TREE_VALUE (link));
5637
5638  abort ();
5639}
5640
5641/* Convert a stack slot address ADDR for variable VAR
5642   (from a containing function)
5643   into an address valid in this function (using a static chain).  */
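/* E.g. (illustrative): an outer-frame address
   (plus (reg virtual-stack-vars) (const_int 8)) is rebased onto this
   function's copy of the static chain, giving
   (plus (reg chain) (const_int 8)).  */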
5644
5645rtx
5646fix_lexical_addr (addr, var)
5647     rtx addr;
5648     tree var;
5649{
5650  rtx basereg;
5651  HOST_WIDE_INT displacement;
5652  tree context = decl_function_context (var);
5653  struct function *fp;
5654  rtx base = 0;
5655
5656  /* If this is the present function, we need not do anything.  */
5657  if (context == current_function_decl || context == inline_function_decl)
5658    return addr;
5659
5660  fp = find_function_data (context);
5661
5662  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5663    addr = XEXP (XEXP (addr, 0), 0);
5664
5665  /* Decode given address as base reg plus displacement.  */
5666  if (GET_CODE (addr) == REG)
5667    basereg = addr, displacement = 0;
5668  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5669    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5670  else
5671    abort ();
5672
5673  /* We accept vars reached via the containing function's
5674     incoming arg pointer and via its stack variables pointer.  */
5675  if (basereg == fp->internal_arg_pointer)
5676    {
5677      /* If reached via arg pointer, get the arg pointer value
5678	 out of that function's stack frame.
5679
5680	 There are two cases:  If a separate ap is needed, allocate a
5681	 slot in the outer function for it and dereference it that way.
5682	 This is correct even if the real ap is actually a pseudo.
5683	 Otherwise, just adjust the offset from the frame pointer to
5684	 compensate.  */
5685
5686#ifdef NEED_SEPARATE_AP
5687      rtx addr;
5688
5689      addr = get_arg_pointer_save_area (fp);
5690      addr = fix_lexical_addr (XEXP (addr, 0), var);
5691      addr = memory_address (Pmode, addr);
5692
5693      base = gen_rtx_MEM (Pmode, addr);
5694      set_mem_alias_set (base, get_frame_alias_set ());
5695      base = copy_to_reg (base);
5696#else
5697      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5698      base = lookup_static_chain (var);
5699#endif
5700    }
5701
5702  else if (basereg == virtual_stack_vars_rtx)
5703    {
5704      /* This is the same code as lookup_static_chain, duplicated here to
5705	 avoid an extra call to decl_function_context.  */
5706      tree link;
5707
5708      for (link = context_display; link; link = TREE_CHAIN (link))
5709	if (TREE_PURPOSE (link) == context)
5710	  {
5711	    base = RTL_EXPR_RTL (TREE_VALUE (link));
5712	    break;
5713	  }
5714    }
5715
5716  if (base == 0)
5717    abort ();
5718
5719  /* Use same offset, relative to appropriate static chain or argument
5720     pointer.  */
5721  return plus_constant (base, displacement);
5722}
5723
5724/* Return the address of the trampoline for entering nested fn FUNCTION.
5725   If necessary, allocate a trampoline (in the stack frame)
5726   and emit rtl to initialize its contents (at entry to this function).  */
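/* Illustrative use: taking the address of a nested function `inner'
   expands (roughly) to trampoline_address (inner_decl), producing an
   entry point that loads the static chain before jumping to INNER.  */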
5727
5728rtx
5729trampoline_address (function)
5730     tree function;
5731{
5732  tree link;
5733  tree rtlexp;
5734  rtx tramp;
5735  struct function *fp;
5736  tree fn_context;
5737
5738  /* Find an existing trampoline and return it.  */
5739  for (link = trampoline_list; link; link = TREE_CHAIN (link))
5740    if (TREE_PURPOSE (link) == function)
5741      return
5742	adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5743
5744  for (fp = outer_function_chain; fp; fp = fp->outer)
5745    for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5746      if (TREE_PURPOSE (link) == function)
5747	{
5748	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5749				    function);
5750	  return adjust_trampoline_addr (tramp);
5751	}
5752
5753  /* None exists; we must make one.  */
5754
5755  /* Find the `struct function' for the function containing FUNCTION.  */
5756  fp = 0;
5757  fn_context = decl_function_context (function);
5758  if (fn_context != current_function_decl
5759      && fn_context != inline_function_decl)
5760    fp = find_function_data (fn_context);
5761
5762  /* Allocate run-time space for this trampoline
5763     (usually in the defining function's stack frame).  */
5764#ifdef ALLOCATE_TRAMPOLINE
5765  tramp = ALLOCATE_TRAMPOLINE (fp);
5766#else
5767  /* If rounding needed, allocate extra space
5768     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
5769#define TRAMPOLINE_REAL_SIZE \
5770  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
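  /* E.g. (illustrative numbers): TRAMPOLINE_SIZE == 10 with 64-bit
     alignment allocates 10 + 8 - 1 == 17 bytes, leaving 10 usable bytes
     once the start is rounded up to an 8-byte boundary.  */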
5771  tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5772				fp ? fp : cfun);
5773#endif
5774
5775  /* Record the trampoline for reuse and note it for later initialization
5776     by expand_function_end.  */
5777  if (fp != 0)
5778    {
5779      rtlexp = make_node (RTL_EXPR);
5780      RTL_EXPR_RTL (rtlexp) = tramp;
5781      fp->x_trampoline_list = tree_cons (function, rtlexp,
5782					 fp->x_trampoline_list);
5783    }
5784  else
5785    {
5786      /* Make the RTL_EXPR node temporary, not momentary, so that the
5787	 trampoline_list doesn't become garbage.  */
5788      rtlexp = make_node (RTL_EXPR);
5789
5790      RTL_EXPR_RTL (rtlexp) = tramp;
5791      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5792    }
5793
5794  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5795  return adjust_trampoline_addr (tramp);
5796}
5797
5798/* Given a trampoline address,
5799   round it up to a multiple of TRAMPOLINE_ALIGNMENT.  */
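/* E.g. (illustrative): with TRAMPOLINE_ALIGNMENT == 64 bits, an address
   of 0x1003 becomes (0x1003 + 7) & -8 == 0x1008.  */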
5800
5801static rtx
5802round_trampoline_addr (tramp)
5803     rtx tramp;
5804{
5805  /* Round address up to desired boundary.  */
5806  rtx temp = gen_reg_rtx (Pmode);
5807  rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5808  rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5809
5810  temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
5811			       temp, 0, OPTAB_LIB_WIDEN);
5812  tramp = expand_simple_binop (Pmode, AND, temp, mask,
5813			       temp, 0, OPTAB_LIB_WIDEN);
5814
5815  return tramp;
5816}
5817
5818/* Given a trampoline address, round it, then apply any
5819   platform-specific adjustments so that the result can be used for a
5820   function call.  */
5821
5822static rtx
5823adjust_trampoline_addr (tramp)
5824     rtx tramp;
5825{
5826  tramp = round_trampoline_addr (tramp);
5827#ifdef TRAMPOLINE_ADJUST_ADDRESS
5828  TRAMPOLINE_ADJUST_ADDRESS (tramp);
5829#endif
5830  return tramp;
5831}
5832
5833/* Put all this function's BLOCK nodes including those that are chained
5834   onto the first block into a vector, and return it.
5835   Also store in each NOTE for the beginning or end of a block
5836   the index of that block in the vector.
5837   This works on BLOCK, the chain of top-level blocks of the function
5838   (from DECL_INITIAL), and on the function's insn chain.  */
5839
5840void
5841identify_blocks ()
5842{
5843  int n_blocks;
5844  tree *block_vector, *last_block_vector;
5845  tree *block_stack;
5846  tree block = DECL_INITIAL (current_function_decl);
5847
5848  if (block == 0)
5849    return;
5850
5851  /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5852     depth-first order.  */
5853  block_vector = get_block_vector (block, &n_blocks);
5854  block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5855
5856  last_block_vector = identify_blocks_1 (get_insns (),
5857					 block_vector + 1,
5858					 block_vector + n_blocks,
5859					 block_stack);
5860
5861  /* If we didn't use all of the subblocks, we've misplaced block notes.  */
5862  /* ??? This appears to happen all the time.  Latent bugs elsewhere?  */
5863  if (0 && last_block_vector != block_vector + n_blocks)
5864    abort ();
5865
5866  free (block_vector);
5867  free (block_stack);
5868}
5869
5870/* Subroutine of identify_blocks.  Do the block substitution on the
5871   insn chain beginning with INSNS.  Recurse for CALL_PLACEHOLDER chains.
5872
5873   BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5874   BLOCK_VECTOR is incremented for each block seen.  */
5875
5876static tree *
5877identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5878     rtx insns;
5879     tree *block_vector;
5880     tree *end_block_vector;
5881     tree *orig_block_stack;
5882{
5883  rtx insn;
5884  tree *block_stack = orig_block_stack;
5885
5886  for (insn = insns; insn; insn = NEXT_INSN (insn))
5887    {
5888      if (GET_CODE (insn) == NOTE)
5889	{
5890	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5891	    {
5892	      tree b;
5893
5894	      /* If there are more block notes than BLOCKs, something
5895		 is badly wrong.  */
5896	      if (block_vector == end_block_vector)
5897		abort ();
5898
5899	      b = *block_vector++;
5900	      NOTE_BLOCK (insn) = b;
5901	      *block_stack++ = b;
5902	    }
5903	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5904	    {
5905	      /* If there are more NOTE_INSN_BLOCK_ENDs than
5906		 NOTE_INSN_BLOCK_BEGs, something is badly wrong.  */
5907	      if (block_stack == orig_block_stack)
5908		abort ();
5909
5910	      NOTE_BLOCK (insn) = *--block_stack;
5911	    }
5912	}
5913      else if (GET_CODE (insn) == CALL_INSN
5914	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5915	{
5916	  rtx cp = PATTERN (insn);
5917
5918	  block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5919					    end_block_vector, block_stack);
5920	  if (XEXP (cp, 1))
5921	    block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5922					      end_block_vector, block_stack);
5923	  if (XEXP (cp, 2))
5924	    block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5925					      end_block_vector, block_stack);
5926	}
5927    }
5928
5929  /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5930     something is badly wrong.  */
5931  if (block_stack != orig_block_stack)
5932    abort ();
5933
5934  return block_vector;
5935}
5936
5937/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
5938   and create duplicate blocks.  */
5939/* ??? Need an option to either create block fragments or to create
5940   abstract origin duplicates of a source block.  It really depends
5941   on what optimization has been performed.  */
5942
5943void
5944reorder_blocks ()
5945{
5946  tree block = DECL_INITIAL (current_function_decl);
5947  varray_type block_stack;
5948
5949  if (block == NULL_TREE)
5950    return;
5951
5952  VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5953
5954  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
5955  reorder_blocks_0 (block);
5956
5957  /* Prune the old trees away, so that they don't get in the way.  */
5958  BLOCK_SUBBLOCKS (block) = NULL_TREE;
5959  BLOCK_CHAIN (block) = NULL_TREE;
5960
5961  /* Recreate the block tree from the note nesting.  */
5962  reorder_blocks_1 (get_insns (), block, &block_stack);
5963  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5964
5965  /* Remove deleted blocks from the block fragment chains.  */
5966  reorder_fix_fragments (block);
5967}
5968
5969/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
5970
5971static void
5972reorder_blocks_0 (block)
5973     tree block;
5974{
5975  while (block)
5976    {
5977      TREE_ASM_WRITTEN (block) = 0;
5978      reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
5979      block = BLOCK_CHAIN (block);
5980    }
5981}
5982
5983static void
5984reorder_blocks_1 (insns, current_block, p_block_stack)
5985     rtx insns;
5986     tree current_block;
5987     varray_type *p_block_stack;
5988{
5989  rtx insn;
5990
5991  for (insn = insns; insn; insn = NEXT_INSN (insn))
5992    {
5993      if (GET_CODE (insn) == NOTE)
5994	{
5995	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5996	    {
5997	      tree block = NOTE_BLOCK (insn);
5998
5999	      /* If we have seen this block before, that means it now
6000		 spans multiple address regions.  Create a new fragment.  */
6001	      if (TREE_ASM_WRITTEN (block))
6002		{
6003		  tree new_block = copy_node (block);
6004		  tree origin;
6005
6006		  origin = (BLOCK_FRAGMENT_ORIGIN (block)
6007			    ? BLOCK_FRAGMENT_ORIGIN (block)
6008			    : block);
6009		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
6010		  BLOCK_FRAGMENT_CHAIN (new_block)
6011		    = BLOCK_FRAGMENT_CHAIN (origin);
6012		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
6013
6014		  NOTE_BLOCK (insn) = new_block;
6015		  block = new_block;
6016		}
6017
6018	      BLOCK_SUBBLOCKS (block) = 0;
6019	      TREE_ASM_WRITTEN (block) = 1;
6020	      BLOCK_SUPERCONTEXT (block) = current_block;
6021	      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
6022	      BLOCK_SUBBLOCKS (current_block) = block;
6023	      current_block = block;
6024	      VARRAY_PUSH_TREE (*p_block_stack, block);
6025	    }
6026	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
6027	    {
6028	      NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
6029	      VARRAY_POP (*p_block_stack);
6030	      BLOCK_SUBBLOCKS (current_block)
6031		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
6032	      current_block = BLOCK_SUPERCONTEXT (current_block);
6033	    }
6034	}
6035      else if (GET_CODE (insn) == CALL_INSN
6036	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
6037	{
6038	  rtx cp = PATTERN (insn);
6039	  reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
6040	  if (XEXP (cp, 1))
6041	    reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
6042	  if (XEXP (cp, 2))
6043	    reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
6044	}
6045    }
6046}
6047
6048/* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
6049   appears in the block tree, select one of the fragments to become
6050   the new origin block.  */
6051
6052static void
6053reorder_fix_fragments (block)
6054     tree block;
6055{
6056  while (block)
6057    {
6058      tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6059      tree new_origin = NULL_TREE;
6060
6061      if (dup_origin)
6062	{
6063	  if (! TREE_ASM_WRITTEN (dup_origin))
6064	    {
6065	      new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6066
6067	      /* Find the first of the remaining fragments.  There must
6068		 be at least one -- the current block.  */
6069	      while (! TREE_ASM_WRITTEN (new_origin))
6070		new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6071	      BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6072	    }
6073	}
6074      else if (! dup_origin)
6075	new_origin = block;
6076
6077      /* Re-root the rest of the fragments to the new origin.  In the
6078	 case that DUP_ORIGIN was null, that means BLOCK was the origin
6079	 of a chain of fragments and we want to remove those fragments
6080	 that didn't make it to the output.  */
6081      if (new_origin)
6082	{
6083	  tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6084	  tree chain = *pp;
6085
6086	  while (chain)
6087	    {
6088	      if (TREE_ASM_WRITTEN (chain))
6089		{
6090		  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6091		  *pp = chain;
6092		  pp = &BLOCK_FRAGMENT_CHAIN (chain);
6093		}
6094	      chain = BLOCK_FRAGMENT_CHAIN (chain);
6095	    }
6096	  *pp = NULL_TREE;
6097	}
6098
6099      reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6100      block = BLOCK_CHAIN (block);
6101    }
6102}
6103
6104/* Reverse the order of elements in the chain T of blocks,
6105   and return the new head of the chain (old last element).  */
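/* For instance, a chain A -> B -> C is relinked in place to C -> B -> A.  */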
6106
6107static tree
6108blocks_nreverse (t)
6109     tree t;
6110{
6111  tree prev = 0, decl, next;
6112  for (decl = t; decl; decl = next)
6113    {
6114      next = BLOCK_CHAIN (decl);
6115      BLOCK_CHAIN (decl) = prev;
6116      prev = decl;
6117    }
6118  return prev;
6119}
6120
6121/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
6122   non-NULL, list them all into VECTOR, in a depth-first preorder
6123   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
6124   blocks.  */
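/* E.g. (illustrative): a BLOCK B0 whose subblocks are B1 (itself
   containing B1a) and B2 is listed in the order B0, B1, B1a, B2.  */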
6125
6126static int
6127all_blocks (block, vector)
6128     tree block;
6129     tree *vector;
6130{
6131  int n_blocks = 0;
6132
6133  while (block)
6134    {
6135      TREE_ASM_WRITTEN (block) = 0;
6136
6137      /* Record this block.  */
6138      if (vector)
6139	vector[n_blocks] = block;
6140
6141      ++n_blocks;
6142
6143      /* Record the subblocks, and their subblocks...  */
6144      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6145			      vector ? vector + n_blocks : 0);
6146      block = BLOCK_CHAIN (block);
6147    }
6148
6149  return n_blocks;
6150}
6151
6152/* Return a vector containing all the blocks rooted at BLOCK.  The
6153   number of elements in the vector is stored in N_BLOCKS_P.  The
6154   vector is dynamically allocated; it is the caller's responsibility
6155   to call `free' on the pointer returned.  */
6156
6157static tree *
6158get_block_vector (block, n_blocks_p)
6159     tree block;
6160     int *n_blocks_p;
6161{
6162  tree *block_vector;
6163
6164  *n_blocks_p = all_blocks (block, NULL);
6165  block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6166  all_blocks (block, block_vector);
6167
6168  return block_vector;
6169}
6170
6171static int next_block_index = 2;
6172
6173/* Set BLOCK_NUMBER for all the blocks in FN.  */
6174
6175void
6176number_blocks (fn)
6177     tree fn;
6178{
6179  int i;
6180  int n_blocks;
6181  tree *block_vector;
6182
6183  /* For SDB and XCOFF debugging output, we start numbering the blocks
6184     from 1 within each function, rather than keeping a running
6185     count.  */
6186#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6187  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6188    next_block_index = 1;
6189#endif
6190
6191  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6192
6193  /* The top-level BLOCK isn't numbered at all.  */
6194  for (i = 1; i < n_blocks; ++i)
6195    /* We number the blocks from two.  */
6196    BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6197
6198  free (block_vector);
6199
6200  return;
6201}
6202
6203/* If VAR is present in a subblock of BLOCK, return the subblock.  */
6204
6205tree
6206debug_find_var_in_block_tree (var, block)
6207     tree var;
6208     tree block;
6209{
6210  tree t;
6211
6212  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6213    if (t == var)
6214      return block;
6215
6216  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6217    {
6218      tree ret = debug_find_var_in_block_tree (var, t);
6219      if (ret)
6220	return ret;
6221    }
6222
6223  return NULL_TREE;
6224}
6225
6226/* Allocate a function structure and reset its contents to the defaults.  */
6227
6228static void
6229prepare_function_start ()
6230{
6231  cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6232
6233  init_stmt_for_function ();
6234  init_eh_for_function ();
6235
6236  cse_not_expected = ! optimize;
6237
6238  /* Caller save not needed yet.  */
6239  caller_save_needed = 0;
6240
6241  /* No stack slots have been made yet.  */
6242  stack_slot_list = 0;
6243
6244  current_function_has_nonlocal_label = 0;
6245  current_function_has_nonlocal_goto = 0;
6246
6247  /* There is no stack slot for handling nonlocal gotos.  */
6248  nonlocal_goto_handler_slots = 0;
6249  nonlocal_goto_stack_level = 0;
6250
6251  /* No labels have been declared for nonlocal use.  */
6252  nonlocal_labels = 0;
6253  nonlocal_goto_handler_labels = 0;
6254
6255  /* No function calls so far in this function.  */
6256  function_call_count = 0;
6257
6258  /* No parm regs have been allocated.
6259     (This is important for output_inline_function.)  */
6260  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6261
6262  /* Initialize the RTL mechanism.  */
6263  init_emit ();
6264
6265  /* Initialize the queue of pending postincrement and postdecrements,
6266     and some other info in expr.c.  */
6267  init_expr ();
6268
6269  /* We haven't done register allocation yet.  */
6270  reg_renumber = 0;
6271
6272  init_varasm_status (cfun);
6273
6274  /* Clear out data used for inlining.  */
6275  cfun->inlinable = 0;
6276  cfun->original_decl_initial = 0;
6277  cfun->original_arg_vector = 0;
6278
6279  cfun->stack_alignment_needed = STACK_BOUNDARY;
6280  cfun->preferred_stack_boundary = STACK_BOUNDARY;
6281
6282  /* Set if a call to setjmp is seen.  */
6283  current_function_calls_setjmp = 0;
6284
6285  /* Set if a call to longjmp is seen.  */
6286  current_function_calls_longjmp = 0;
6287
6288  current_function_calls_alloca = 0;
6289  current_function_contains_functions = 0;
6290  current_function_is_leaf = 0;
6291  current_function_nothrow = 0;
6292  current_function_sp_is_unchanging = 0;
6293  current_function_uses_only_leaf_regs = 0;
6294  current_function_has_computed_jump = 0;
6295  current_function_is_thunk = 0;
6296
6297  current_function_returns_pcc_struct = 0;
6298  current_function_returns_struct = 0;
6299  current_function_epilogue_delay_list = 0;
6300  current_function_uses_const_pool = 0;
6301  current_function_uses_pic_offset_table = 0;
6302  current_function_cannot_inline = 0;
6303
6304  /* We have not yet needed to make a label to jump to for tail-recursion.  */
6305  tail_recursion_label = 0;
6306
6307  /* We haven't had a need to make a save area for ap yet.  */
6308  arg_pointer_save_area = 0;
6309
6310  /* No stack slots allocated yet.  */
6311  frame_offset = 0;
6312
6313  /* No SAVE_EXPRs in this function yet.  */
6314  save_expr_regs = 0;
6315
6316  /* No RTL_EXPRs in this function yet.  */
6317  rtl_expr_chain = 0;
6318
6319  /* Set up to allocate temporaries.  */
6320  init_temp_slots ();
6321
6322  /* Indicate that we need to distinguish between the return value of the
6323     present function and the return value of a function being called.  */
6324  rtx_equal_function_value_matters = 1;
6325
6326  /* Indicate that we have not instantiated virtual registers yet.  */
6327  virtuals_instantiated = 0;
6328
6329  /* Indicate that we want CONCATs now.  */
6330  generating_concat_p = 1;
6331
6332  /* Indicate we have no need of a frame pointer yet.  */
6333  frame_pointer_needed = 0;
6334
6335  /* By default assume not stdarg.  */
6336  current_function_stdarg = 0;
6337
6338  /* We haven't made any trampolines for this function yet.  */
6339  trampoline_list = 0;
6340
6341  init_pending_stack_adjust ();
6342  inhibit_defer_pop = 0;
6343
6344  current_function_outgoing_args_size = 0;
6345
6346  current_function_funcdef_no = funcdef_no++;
6347
6348  cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6351
6352  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6353
6354  cfun->max_jumptable_ents = 0;
6355
6356  (*lang_hooks.function.init) (cfun);
6357  if (init_machine_status)
6358    cfun->machine = (*init_machine_status) ();
6359}
6360
6361/* Initialize the rtl expansion mechanism so that we can do simple things
6362   like generate sequences.  This is used to provide a context during global
6363   initialization of some passes.  */
6364void
6365init_dummy_function_start ()
6366{
6367  prepare_function_start ();
6368}
6369
6370/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6371   and initialize static variables for generating RTL for the statements
6372   of the function.  */
6373
6374void
6375init_function_start (subr, filename, line)
6376     tree subr;
6377     const char *filename;
6378     int line;
6379{
6380  prepare_function_start ();
6381
6382  current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
6383  cfun->decl = subr;
6384
6385  /* Nonzero if this is a nested function that uses a static chain.  */
6386
6387  current_function_needs_context
6388    = (decl_function_context (current_function_decl) != 0
6389       && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6390
6391  /* Within the function body, compute a type's size as soon as it is laid out.  */
6392  immediate_size_expand++;
6393
6394  /* Prevent ever trying to delete the first instruction of a function.
6395     Also tell final how to output a linenum before the function prologue.
6396     Note linenums could be missing, e.g. when compiling a Java .class file.  */
6397  if (line > 0)
6398    emit_line_note (filename, line);
6399
6400  /* Make sure first insn is a note even if we don't want linenums.
6401     This makes sure the first insn will never be deleted.
6402     Also, final expects a note to appear there.  */
6403  emit_note (NULL, NOTE_INSN_DELETED);
6404
6405  /* Set flags used by final.c.  */
6406  if (aggregate_value_p (DECL_RESULT (subr)))
6407    {
6408#ifdef PCC_STATIC_STRUCT_RETURN
6409      current_function_returns_pcc_struct = 1;
6410#endif
6411      current_function_returns_struct = 1;
6412    }
6413
6414  /* Warn if this value is an aggregate type,
6415     regardless of which calling convention we are using for it.  */
6416  if (warn_aggregate_return
6417      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6418    warning ("function returns an aggregate");
6419
6420  current_function_returns_pointer
6421    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6422}
6423
6424/* Make sure all values used by the optimization passes have sane
6425   defaults.  */
6426void
6427init_function_for_compilation ()
6428{
6429  reg_renumber = 0;
6430
6431  /* No prologue/epilogue insns yet.  */
6432  VARRAY_GROW (prologue, 0);
6433  VARRAY_GROW (epilogue, 0);
6434  VARRAY_GROW (sibcall_epilogue, 0);
6435}
6436
6437/* Expand a call to __main at the beginning of a possible main function.  */
6438
6439#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6440#undef HAS_INIT_SECTION
6441#define HAS_INIT_SECTION
6442#endif
6443
6444void
6445expand_main_function ()
6446{
6447#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
6448  if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
6449    {
6450      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
6451      rtx tmp, seq;
6452
6453      start_sequence ();
6454      /* Forcibly align the stack.  */
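      /* E.g. (illustrative): with a 16-byte preferred boundary, ALIGN is
	 16; a downward-growing stack pointer of 0x7ffffff8 is masked down
	 to 0x7ffffff0, while an upward-growing one is first bumped by 15
	 and then masked, rounding it up instead.  */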
6455#ifdef STACK_GROWS_DOWNWARD
6456      tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT (-align),
6457				 stack_pointer_rtx, 1, OPTAB_WIDEN);
6458#else
6459      tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
6460				 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
6461      tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
6462				 stack_pointer_rtx, 1, OPTAB_WIDEN);
6463#endif
6464      if (tmp != stack_pointer_rtx)
6465	emit_move_insn (stack_pointer_rtx, tmp);
6466
6467      /* Enlist allocate_dynamic_stack_space to pick up the pieces.  */
6468      tmp = force_reg (Pmode, const0_rtx);
6469      allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
6470      seq = get_insns ();
6471      end_sequence ();
6472
6473      for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
6474	if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
6475	  break;
6476      if (tmp)
6477	emit_insn_before (seq, tmp);
6478      else
6479	emit_insn (seq);
6480    }
6481#endif
6482
6483#ifndef HAS_INIT_SECTION
6484  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
6485		     VOIDmode, 0);
6486#endif
6487}
6488
6489/* The PENDING_SIZES represent the sizes of variable-sized types.
6490   Create RTL for the various sizes now (using temporary variables),
6491   so that we can refer to the sizes from the RTL we are generating
6492   for the current function.  The PENDING_SIZES are a TREE_LIST.  The
6493   TREE_VALUE of each node is a SAVE_EXPR.  */
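/* E.g. (illustrative): for `void f (int n, int a[n][n])', the size of
   A's type involves a SAVE_EXPR of `n * n * sizeof (int)', which gets
   expanded here so that RTL for the function body can refer to it.  */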
6494
6495void
6496expand_pending_sizes (pending_sizes)
6497     tree pending_sizes;
6498{
6499  tree tem;
6500
6501  /* Evaluate now the sizes of any types declared among the arguments.  */
6502  for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6503    {
6504      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6505      /* Flush the queue in case this parameter declaration has
6506	 side-effects.  */
6507      emit_queue ();
6508    }
6509}
6510
6511/* Start the RTL for a new function, and set variables used for
6512   emitting RTL.
6513   SUBR is the FUNCTION_DECL node.
6514   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6515   the function's parameters, which must be run at any return statement.  */
6516
6517void
6518expand_function_start (subr, parms_have_cleanups)
6519     tree subr;
6520     int parms_have_cleanups;
6521{
6522  tree tem;
6523  rtx last_ptr = NULL_RTX;
6524
6525  /* Make sure volatile mem refs aren't considered
6526     valid operands of arithmetic insns.  */
6527  init_recog_no_volatile ();
6528
6529  current_function_instrument_entry_exit
6530    = (flag_instrument_function_entry_exit
6531       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6532
6533  current_function_profile
6534    = (profile_flag
6535       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6536
6537  current_function_limit_stack
6538    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6539
6540  /* If the function gets a static chain arg, store it in the stack frame.
6541     Do this first, so it gets the first stack slot offset.  */
6542  if (current_function_needs_context)
6543    {
6544      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6545
6546      /* Delay copying static chain if it is not a register to avoid
6547	 conflicts with regs used for parameters.  */
6548      if (! SMALL_REGISTER_CLASSES
6549	  || GET_CODE (static_chain_incoming_rtx) == REG)
6550	emit_move_insn (last_ptr, static_chain_incoming_rtx);
6551    }
6552
6553  /* If the parameters of this function need cleaning up, get a label
6554     for the beginning of the code which executes those cleanups.  This must
6555     be done before doing anything with return_label.  */
6556  if (parms_have_cleanups)
6557    cleanup_label = gen_label_rtx ();
6558  else
6559    cleanup_label = 0;
6560
6561  /* Make the label for return statements to jump to.  Do not special
6562     case machines with special return instructions -- they will be
6563     handled later during jump, ifcvt, or epilogue creation.  */
6564  return_label = gen_label_rtx ();
6565
6566  /* Initialize rtx used to return the value.  */
6567  /* Do this before assign_parms so that we copy the struct value address
6568     before any library calls that assign parms might generate.  */
6569
6570  /* Decide whether to return the value in memory or in a register.  */
6571  if (aggregate_value_p (DECL_RESULT (subr)))
6572    {
6573      /* Returning something that won't go in a register.  */
6574      rtx value_address = 0;
6575
6576#ifdef PCC_STATIC_STRUCT_RETURN
6577      if (current_function_returns_pcc_struct)
6578	{
6579	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6580	  value_address = assemble_static_space (size);
6581	}
6582      else
6583#endif
6584	{
6585	  /* Expect to be passed the address of a place to store the value.
6586	     If it is passed as an argument, assign_parms will take care of
6587	     it.  */
6588	  if (struct_value_incoming_rtx)
6589	    {
6590	      value_address = gen_reg_rtx (Pmode);
6591	      emit_move_insn (value_address, struct_value_incoming_rtx);
6592	    }
6593	}
6594      if (value_address)
6595	{
6596	  rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6597	  set_mem_attributes (x, DECL_RESULT (subr), 1);
6598	  SET_DECL_RTL (DECL_RESULT (subr), x);
6599	}
6600    }
6601  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6602    /* If return mode is void, this decl rtl should not be used.  */
6603    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
6604  else
6605    {
6606      /* Compute the return values into a pseudo reg, which we will copy
6607	 into the true return register after the cleanups are done.  */
6608
6609      /* In order to figure out what mode to use for the pseudo, we
6610	 figure out what the mode of the eventual return register will
6611	 actually be, and use that.  */
6612      rtx hard_reg
6613	= hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
6614			       subr, 1);
6615
6616      /* Structures that are returned in registers are not aggregate_value_p,
6617	 so we may see a PARALLEL or a REG.  */
6618      if (REG_P (hard_reg))
6619	SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
6620      else if (GET_CODE (hard_reg) == PARALLEL)
6621	SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
6622      else
6623	abort ();
6624
6625      /* Set DECL_REGISTER flag so that expand_function_end will copy the
6626	 result to the real return register(s).  */
6627      DECL_REGISTER (DECL_RESULT (subr)) = 1;
6628    }
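
  /* Illustration (a hedged sketch; the register numbers are made up):
     for a function such as

	 long long f (void);

     on a target that returns DImode values in a single hard register,
     hard_function_value might yield (reg:DI 0), so the result's
     DECL_RTL becomes a fresh pseudo such as (reg:DI 58).  The copy
     from the pseudo into (reg:DI 0) is emitted by expand_function_end
     once the cleanups have run.  */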
6629
6630  /* Initialize rtx for parameters and local variables.
6631     In some cases this requires emitting insns.  */
6632
6633  assign_parms (subr);
6634
6635  /* Copy the static chain now if it wasn't a register.  The delay is to
6636     avoid conflicts with the parameter passing registers.  */
6637
6638  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6639    if (GET_CODE (static_chain_incoming_rtx) != REG)
6640      emit_move_insn (last_ptr, static_chain_incoming_rtx);
6641
6642  /* The following was moved from init_function_start.
6643     The move is supposed to make sdb output more accurate.  */
6644  /* Indicate the beginning of the function body,
6645     as opposed to parm setup.  */
6646  emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
6647
6648  if (GET_CODE (get_last_insn ()) != NOTE)
6649    emit_note (NULL, NOTE_INSN_DELETED);
6650  parm_birth_insn = get_last_insn ();
6651
6652  context_display = 0;
6653  if (current_function_needs_context)
6654    {
6655      /* Fetch static chain values for containing functions.  */
6656      tem = decl_function_context (current_function_decl);
6657      /* Copy the static chain pointer into a pseudo.  If we have
6658	 small register classes, copy the value from memory if
6659	 static_chain_incoming_rtx is a REG.  */
6660      if (tem)
6661	{
6662	  /* If the static chain originally came in a register, put it back
6663	     there, then move it out in the next insn.  The reason for
6664	     this peculiar code is to satisfy function integration.  */
6665	  if (SMALL_REGISTER_CLASSES
6666	      && GET_CODE (static_chain_incoming_rtx) == REG)
6667	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
6668	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
6669	}
6670
6671      while (tem)
6672	{
6673	  tree rtlexp = make_node (RTL_EXPR);
6674
6675	  RTL_EXPR_RTL (rtlexp) = last_ptr;
6676	  context_display = tree_cons (tem, rtlexp, context_display);
6677	  tem = decl_function_context (tem);
6678	  if (tem == 0)
6679	    break;
6680	  /* Chain thru stack frames, assuming pointer to next lexical frame
6681	     is found at the place we always store it.  */
6682#ifdef FRAME_GROWS_DOWNWARD
6683	  last_ptr = plus_constant (last_ptr,
6684				    -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6685#endif
6686	  last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6687	  set_mem_alias_set (last_ptr, get_frame_alias_set ());
6688	  last_ptr = copy_to_reg (last_ptr);
6689
6690	  /* If we are not optimizing, ensure that we know that this
6691	     piece of context is live over the entire function.  */
6692	  if (! optimize)
6693	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6694						save_expr_regs);
6695	}
6696    }
6697
6698  if (current_function_instrument_entry_exit)
6699    {
6700      rtx fun = DECL_RTL (current_function_decl);
6701      if (GET_CODE (fun) == MEM)
6702	fun = XEXP (fun, 0);
6703      else
6704	abort ();
6705      emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
6706			 2, fun, Pmode,
6707			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6708						     0,
6709						     hard_frame_pointer_rtx),
6710			 Pmode);
6711    }
6712
6713  if (current_function_profile)
6714    {
6715#ifdef PROFILE_HOOK
6716      PROFILE_HOOK (current_function_funcdef_no);
6717#endif
6718    }
6719
6720  /* After the display initializations is where the tail-recursion label
6721     should go, if we end up needing one.   Ensure we have a NOTE here
6722     since some things (like trampolines) get placed before this.  */
6723  tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
6724
6725  /* Evaluate now the sizes of any types declared among the arguments.  */
6726  expand_pending_sizes (nreverse (get_pending_sizes ()));
6727
6728  /* Make sure there is a line number after the function entry setup code.  */
6729  force_next_line_note ();
6730}
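
/* A rough picture of the insn stream expand_function_start leaves
   behind (illustrative only; the exact contents depend on the target
   and on the function being compiled):

       [static chain store, if current_function_needs_context]
       [insns emitted by assign_parms for the incoming parameters]
       NOTE_INSN_FUNCTION_BEG
       [context display setup, entry instrumentation]
       NOTE_INSN_DELETED            <- tail_recursion_reentry
       [expansion of any pending size SAVE_EXPRs]

   expand_function_end later inserts trampoline initialization and
   stack probes just before tail_recursion_reentry.  */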
6731
6732/* Undo the effects of init_dummy_function_start.  */
6733void
6734expand_dummy_function_end ()
6735{
6736  /* End any sequences that failed to be closed due to syntax errors.  */
6737  while (in_sequence_p ())
6738    end_sequence ();
6739
6740  /* Outside function body, can't compute type's actual size
6741     until next function's body starts.  */
6742
6743  free_after_parsing (cfun);
6744  free_after_compilation (cfun);
6745  cfun = 0;
6746}
6747
6748/* Call DOIT for each hard register used as a return value from
6749   the current function.  */
6750
6751void
6752diddle_return_value (doit, arg)
6753     void (*doit) PARAMS ((rtx, void *));
6754     void *arg;
6755{
6756  rtx outgoing = current_function_return_rtx;
6757
6758  if (! outgoing)
6759    return;
6760
6761  if (GET_CODE (outgoing) == REG)
6762    (*doit) (outgoing, arg);
6763  else if (GET_CODE (outgoing) == PARALLEL)
6764    {
6765      int i;
6766
6767      for (i = 0; i < XVECLEN (outgoing, 0); i++)
6768	{
6769	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6770
6771	  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6772	    (*doit) (x, arg);
6773	}
6774    }
6775}
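
/* Illustration (a hedged sketch; the registers shown are invented):
   a value returned in two registers might have

       current_function_return_rtx
	 = (parallel [(expr_list (reg:DI 0) (const_int 0))
		      (expr_list (reg:DI 1) (const_int 8))])

   in which case DOIT is applied to (reg:DI 0) and to (reg:DI 1).
   Pseudo registers appearing in such a PARALLEL are skipped by the
   FIRST_PSEUDO_REGISTER check above.  */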
6776
6777static void
6778do_clobber_return_reg (reg, arg)
6779     rtx reg;
6780     void *arg ATTRIBUTE_UNUSED;
6781{
6782  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6783}
6784
6785void
6786clobber_return_register ()
6787{
6788  diddle_return_value (do_clobber_return_reg, NULL);
6789
6790  /* In case we use a pseudo to return the value, clobber it too.  */
6791  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6792    {
6793      tree decl_result = DECL_RESULT (current_function_decl);
6794      rtx decl_rtl = DECL_RTL (decl_result);
6795      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6796	{
6797	  do_clobber_return_reg (decl_rtl, NULL);
6798	}
6799    }
6800}
6801
6802static void
6803do_use_return_reg (reg, arg)
6804     rtx reg;
6805     void *arg ATTRIBUTE_UNUSED;
6806{
6807  emit_insn (gen_rtx_USE (VOIDmode, reg));
6808}
6809
6810void
6811use_return_register ()
6812{
6813  diddle_return_value (do_use_return_reg, NULL);
6814}
6815
6816static GTY(()) rtx initial_trampoline;
6817
6818/* Generate RTL for the end of the current function.
6819   FILENAME and LINE are the current position in the source file.
6820
6821   It is up to language-specific callers to do cleanups for parameters--
6822   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */
6823
6824void
6825expand_function_end (filename, line, end_bindings)
6826     const char *filename;
6827     int line;
6828     int end_bindings;
6829{
6830  tree link;
6831  rtx clobber_after;
6832
6833  finish_expr_for_function ();
6834
6835  /* If arg_pointer_save_area was referenced only from a nested
6836     function, we will not have initialized it yet.  Do that now.  */
6837  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6838    get_arg_pointer_save_area (cfun);
6839
6840#ifdef NON_SAVING_SETJMP
6841  /* Don't put any variables in registers if we call setjmp
6842     on a machine that fails to restore the registers.  */
6843  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6844    {
6845      if (DECL_INITIAL (current_function_decl) != error_mark_node)
6846	setjmp_protect (DECL_INITIAL (current_function_decl));
6847
6848      setjmp_protect_args ();
6849    }
6850#endif
6851
6852  /* Initialize any trampolines required by this function.  */
6853  for (link = trampoline_list; link; link = TREE_CHAIN (link))
6854    {
6855      tree function = TREE_PURPOSE (link);
6856      rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6857      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6858#ifdef TRAMPOLINE_TEMPLATE
6859      rtx blktramp;
6860#endif
6861      rtx seq;
6862
6863#ifdef TRAMPOLINE_TEMPLATE
6864      /* First make sure this compilation has a template for
6865	 initializing trampolines.  */
6866      if (initial_trampoline == 0)
6867	{
6868	  initial_trampoline
6869	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6870	  set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6871	}
6872#endif
6873
6874      /* Generate insns to initialize the trampoline.  */
6875      start_sequence ();
6876      tramp = round_trampoline_addr (XEXP (tramp, 0));
6877#ifdef TRAMPOLINE_TEMPLATE
6878      blktramp = replace_equiv_address (initial_trampoline, tramp);
6879      emit_block_move (blktramp, initial_trampoline,
6880		       GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
6881#endif
6882      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6883      seq = get_insns ();
6884      end_sequence ();
6885
6886      /* Put those insns at entry to the containing function (this one).  */
6887      emit_insn_before (seq, tail_recursion_reentry);
6888    }
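
  /* Illustration (editorial; the details are entirely target-specific
     and `call_through_pointer' below is hypothetical): a trampoline is
     needed when the address of a nested function escapes, as in the
     GNU C fragment

	 int outer (int x)
	 {
	   int inner (int y) { return x + y; }
	   return call_through_pointer (inner);
	 }

     The insns generated above copy TRAMPOLINE_TEMPLATE, if the target
     defines one, into the stack slot reserved for the trampoline and
     then let INITIALIZE_TRAMPOLINE patch in the address of `inner' and
     the static chain CONTEXT, so that calling the trampoline behaves
     like calling `inner' with outer's frame available.  */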
6889
6890  /* If we are doing stack checking and this function makes calls,
6891     do a stack probe at the start of the function to ensure we have enough
6892     space for another stack frame.  */
6893  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6894    {
6895      rtx insn, seq;
6896
6897      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6898	if (GET_CODE (insn) == CALL_INSN)
6899	  {
6900	    start_sequence ();
6901	    probe_stack_range (STACK_CHECK_PROTECT,
6902			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6903	    seq = get_insns ();
6904	    end_sequence ();
6905	    emit_insn_before (seq, tail_recursion_reentry);
6906	    break;
6907	  }
6908    }
6909
6910  /* Warn about unused parms if extra warnings were specified.  */
6911  /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6912     warning.  WARN_UNUSED_PARAMETER is negative when set by
6913     -Wunused.  */
6914  if (warn_unused_parameter > 0
6915      || (warn_unused_parameter < 0 && extra_warnings))
6916    {
6917      tree decl;
6918
6919      for (decl = DECL_ARGUMENTS (current_function_decl);
6920	   decl; decl = TREE_CHAIN (decl))
6921	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6922	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6923	  warning_with_decl (decl, "unused parameter `%s'");
6924    }
6925
6926  /* Delete handlers for nonlocal gotos if nothing uses them.  */
6927  if (nonlocal_goto_handler_slots != 0
6928      && ! current_function_has_nonlocal_label)
6929    delete_handlers ();
6930
6931  /* End any sequences that failed to be closed due to syntax errors.  */
6932  while (in_sequence_p ())
6933    end_sequence ();
6934
6935  /* Outside function body, can't compute type's actual size
6936     until next function's body starts.  */
6937  immediate_size_expand--;
6938
6939  clear_pending_stack_adjust ();
6940  do_pending_stack_adjust ();
6941
6942  /* Mark the end of the function body.
6943     If control reaches this insn, the function can drop through
6944     without returning a value.  */
6945  emit_note (NULL, NOTE_INSN_FUNCTION_END);
6946
6947  /* Must mark the last line number note in the function, so that the test
6948     coverage code can avoid counting the last line twice.  This just tells
6949     the code to ignore the immediately following line note, since there
6950     already exists a copy of this note somewhere above.  This line number
6951     note is still needed for debugging though, so we can't delete it.  */
6952  if (flag_test_coverage)
6953    emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
6954
6955  /* Output a linenumber for the end of the function.
6956     SDB depends on this.  */
6957  emit_line_note_force (filename, line);
6958
6959  /* Before the return label (if any), clobber the return
6960     registers so that they are not propagated live to the rest of
6961     the function.  This can only happen with functions that drop
6962     through; if there had been a return statement, there would
6963     have either been a return rtx, or a jump to the return label.
6964
6965     We delay actual code generation until after the current_function_value_rtx
6966     is computed.  */
6967  clobber_after = get_last_insn ();
6968
6969  /* Output the label for the actual return from the function,
6970     if one is expected.  This happens either because a function epilogue
6971     is used instead of a return instruction, or because a return was done
6972     with a goto in order to run local cleanups, or because of pcc-style
6973     structure returning.  */
6974  if (return_label)
6975    emit_label (return_label);
6976
6977  /* C++ uses this.  */
6978  if (end_bindings)
6979    expand_end_bindings (0, 0, 0);
6980
6981  if (current_function_instrument_entry_exit)
6982    {
6983      rtx fun = DECL_RTL (current_function_decl);
6984      if (GET_CODE (fun) == MEM)
6985	fun = XEXP (fun, 0);
6986      else
6987	abort ();
6988      emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
6989			 2, fun, Pmode,
6990			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6991						     0,
6992						     hard_frame_pointer_rtx),
6993			 Pmode);
6994    }
6995
6996  /* Let except.c know where it should emit the call to unregister
6997     the function context for sjlj exceptions.  */
6998  if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
6999    sjlj_emit_function_exit_after (get_last_insn ());
7000
7001  /* If we had calls to alloca, and this machine needs
7002     an accurate stack pointer to exit the function,
7003     insert some code to save and restore the stack pointer.  */
7004#ifdef EXIT_IGNORE_STACK
7005  if (! EXIT_IGNORE_STACK)
7006#endif
7007    if (current_function_calls_alloca)
7008      {
7009	rtx tem = 0;
7010
7011	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7012	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7013      }
7014
7015  /* If scalar return value was computed in a pseudo-reg, or was a named
7016     return value that got dumped to the stack, copy that to the hard
7017     return register.  */
7018  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7019    {
7020      tree decl_result = DECL_RESULT (current_function_decl);
7021      rtx decl_rtl = DECL_RTL (decl_result);
7022
7023      if (REG_P (decl_rtl)
7024	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7025	  : DECL_REGISTER (decl_result))
7026	{
7027	  rtx real_decl_rtl = current_function_return_rtx;
7028
7029	  /* This should be set in assign_parms.  */
7030	  if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7031	    abort ();
7032
7033	  /* If this is a BLKmode structure being returned in registers,
7034	     then use the mode computed in expand_return.  Note that if
7035	     decl_rtl is memory, then its mode may have been changed,
7036	     but that current_function_return_rtx has not.  */
7037	  if (GET_MODE (real_decl_rtl) == BLKmode)
7038	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7039
7040	  /* If a named return value dumped decl_result to memory, then
7041	     we may need to re-do the PROMOTE_MODE signed/unsigned
7042	     extension.  */
7043	  if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7044	    {
7045	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7046
7047#ifdef PROMOTE_FUNCTION_RETURN
7048	      promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7049			    &unsignedp, 1);
7050#endif
7051
7052	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
7053	    }
7054	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
7055	    {
7056	      /* If expand_function_start has created a PARALLEL for decl_rtl,
7057		 move the result to the real return registers.  Otherwise, do
7058		 a group load from decl_rtl for a named return.  */
7059	      if (GET_CODE (decl_rtl) == PARALLEL)
7060		emit_group_move (real_decl_rtl, decl_rtl);
7061	      else
7062		emit_group_load (real_decl_rtl, decl_rtl,
7063				 int_size_in_bytes (TREE_TYPE (decl_result)));
7064	    }
7065	  else
7066	    emit_move_insn (real_decl_rtl, decl_rtl);
7067	}
7068    }
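
  /* Illustration (editorial; assumes a target where PROMOTE_MODE
     widens sub-word values): for

	 short f (void);

     the hard return register may be SImode while a stack-dumped
     decl_rtl stays HImode; the convert_move above then performs the
     signed or unsigned widening so the caller sees a properly
     extended value.  */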
7069
7070  /* If returning a structure, arrange to return the address of the value
7071     in a place where debuggers expect to find it.
7072
7073     If returning a structure PCC style,
7074     the caller also depends on this value.
7075     And current_function_returns_pcc_struct is not necessarily set.  */
7076  if (current_function_returns_struct
7077      || current_function_returns_pcc_struct)
7078    {
7079      rtx value_address
7080	= XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7081      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7082#ifdef FUNCTION_OUTGOING_VALUE
7083      rtx outgoing
7084	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7085				   current_function_decl);
7086#else
7087      rtx outgoing
7088	= FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7089#endif
7090
7091      /* Mark this as a function return value so integrate will delete the
7092	 assignment and USE below when inlining this function.  */
7093      REG_FUNCTION_VALUE_P (outgoing) = 1;
7094
7095#ifdef POINTERS_EXTEND_UNSIGNED
7096      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
7097      if (GET_MODE (outgoing) != GET_MODE (value_address))
7098	value_address = convert_memory_address (GET_MODE (outgoing),
7099						value_address);
7100#endif
7101
7102      emit_move_insn (outgoing, value_address);
7103
7104      /* Show return register used to hold result (in this case the address
7105	 of the result).  */
7106      current_function_return_rtx = outgoing;
7107    }
7108
7109  /* If this is an implementation of throw, do what's necessary to
7110     communicate between __builtin_eh_return and the epilogue.  */
7111  expand_eh_return ();
7112
7113  /* Emit the actual code to clobber return register.  */
7114  {
7115    rtx seq, after;
7116
7117    start_sequence ();
7118    clobber_return_register ();
7119    seq = get_insns ();
7120    end_sequence ();
7121
7122    after = emit_insn_after (seq, clobber_after);
7123
7124    if (clobber_after != after)
7125      cfun->x_clobber_return_insn = after;
7126  }
7127
7128  /* ??? This should no longer be necessary since stupid is no longer with
7129     us, but there are some parts of the compiler (e.g. reload_combine and
7130     sh mach_dep_reorg) that still try to compute their own lifetime info
7131     instead of using the general framework.  */
7132  use_return_register ();
7133
7134  /* Fix up any gotos that jumped out to the outermost
7135     binding level of the function.
7136     Must follow emitting RETURN_LABEL.  */
7137
7138  /* If you have any cleanups to do at this point,
7139     and they need to create temporary variables,
7140     then you will lose.  */
7141  expand_fixups (get_insns ());
7142}
7143
7144rtx
7145get_arg_pointer_save_area (f)
7146     struct function *f;
7147{
7148  rtx ret = f->x_arg_pointer_save_area;
7149
7150  if (! ret)
7151    {
7152      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7153      f->x_arg_pointer_save_area = ret;
7154    }
7155
7156  if (f == cfun && ! f->arg_pointer_save_area_init)
7157    {
7158      rtx seq;
7159
7160      /* Save the arg pointer at the beginning of the function.  The
7161	 generated stack slot may not be a valid memory address, so we
7162	 have to check it and fix it if necessary.  */
7163      start_sequence ();
7164      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7165      seq = get_insns ();
7166      end_sequence ();
7167
7168      push_topmost_sequence ();
7169      emit_insn_after (seq, get_insns ());
7170      pop_topmost_sequence ();
7171    }
7172
7173  return ret;
7174}
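
/* Usage sketch (editorial): code that needs the incoming argument
   pointer after the prologue may have clobbered it can say

       rtx ap = get_arg_pointer_save_area (cfun);

   which reserves the stack slot and, for the current function, also
   emits the store of virtual_incoming_args_rtx near the start of the
   insn chain via push_topmost_sequence/pop_topmost_sequence.  */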
7175
7176/* Extend a vector that records the INSN_UIDs of INSNS
7177   (a list of one or more insns).  */
7178
7179static void
7180record_insns (insns, vecp)
7181     rtx insns;
7182     varray_type *vecp;
7183{
7184  int i, len;
7185  rtx tmp;
7186
7187  tmp = insns;
7188  len = 0;
7189  while (tmp != NULL_RTX)
7190    {
7191      len++;
7192      tmp = NEXT_INSN (tmp);
7193    }
7194
7195  i = VARRAY_SIZE (*vecp);
7196  VARRAY_GROW (*vecp, i + len);
7197  tmp = insns;
7198  while (tmp != NULL_RTX)
7199    {
7200      VARRAY_INT (*vecp, i) = INSN_UID (tmp);
7201      i++;
7202      tmp = NEXT_INSN (tmp);
7203    }
7204}
7205
7206/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
7207   be running after reorg, SEQUENCE rtl is possible.  */
7208
7209static int
7210contains (insn, vec)
7211     rtx insn;
7212     varray_type vec;
7213{
7214  int i, j;
7215
7216  if (GET_CODE (insn) == INSN
7217      && GET_CODE (PATTERN (insn)) == SEQUENCE)
7218    {
7219      int count = 0;
7220      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7221	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7222	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7223	    count++;
7224      return count;
7225    }
7226  else
7227    {
7228      for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7229	if (INSN_UID (insn) == VARRAY_INT (vec, j))
7230	  return 1;
7231    }
7232  return 0;
7233}
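
/* Illustration (editorial): after delayed-branch scheduling an insn
   with a filled delay slot is wrapped in a SEQUENCE, e.g.

       (insn (sequence [(jump_insn ...) (insn ...)]))

   so an epilogue insn may be buried one level down; `contains'
   therefore matches UIDs against the SEQUENCE elements instead of
   looking only at the outer insn.  */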
7234
7235int
7236prologue_epilogue_contains (insn)
7237     rtx insn;
7238{
7239  if (contains (insn, prologue))
7240    return 1;
7241  if (contains (insn, epilogue))
7242    return 1;
7243  return 0;
7244}
7245
7246int
7247sibcall_epilogue_contains (insn)
7248     rtx insn;
7249{
7250  if (sibcall_epilogue)
7251    return contains (insn, sibcall_epilogue);
7252  return 0;
7253}
7254
7255#ifdef HAVE_return
7256/* Insert gen_return at the end of block BB.  This also means updating
7257   block_for_insn appropriately.  */
7258
7259static void
7260emit_return_into_block (bb, line_note)
7261     basic_block bb;
7262     rtx line_note;
7263{
7267  emit_jump_insn_after (gen_return (), bb->end);
7268  if (line_note)
7269    emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7270			  NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7271}
7272#endif /* HAVE_return */
7273
7274#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7275
7276/* These functions convert the epilogue into a variant that does not modify the
7277   stack pointer.  This is used in cases where a function returns an object
7278   whose size is not known until it is computed.  The called function leaves the
7279   object on the stack, leaves the stack depressed, and returns a pointer to
7280   the object.
7281
7282   What we need to do is track all modifications and references to the stack
7283   pointer, deleting the modifications and changing the references to point to
7284   the location the stack pointer would have pointed to had the modifications
7285   taken place.
7286
7287   These functions need to be portable so we need to make as few assumptions
7288   about the epilogue as we can.  However, the epilogue basically contains
7289   three things: instructions to reset the stack pointer, instructions to
7290   reload registers, possibly including the frame pointer, and an
7291   instruction to return to the caller.
7292
7293   If we can't be sure of what a relevant epilogue insn is doing, we abort.
7294   We also make no attempt to validate the insns we make since if they are
7295   invalid, we probably can't do anything valid.  The intent is that these
7296   routines get "smarter" as more and more machines start to use them and
7297   they try operating on different epilogues.
7298
7299   We use the following structure to track what the part of the epilogue that
7300   we've already processed has done.  We keep two copies of the SP equivalence,
7301   one for use during the insn we are processing and one for use in the next
7302   insn.  The difference is because one part of a PARALLEL may adjust SP
7303   and the other may use it.  */
7304
7305struct epi_info
7306{
7307  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
7308  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
7309  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
7310  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
7311  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
7312				   should be set to once we no longer need
7313				   its value.  */
7314};
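
/* A small worked example of the tracking above (editorial; register
   names are invented).  If the epilogue contains

       (set (reg sp) (plus (reg sp) (const_int 16)))
       (set (reg r0) (mem (reg sp)))

   the first insn is not emitted; we only record sp_equiv_reg = sp and
   sp_offset = 16.  The second insn is then emitted rewritten as

       (set (reg r0) (mem (plus (reg sp) (const_int 16))))

   so the restore happens without the stack pointer ever moving.  */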
7315
7316static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7317static void emit_equiv_load PARAMS ((struct epi_info *));
7318
7319/* Modify INSNS, a list of one or more insns that is part of the epilogue,
7320   so that it makes no modifications to the stack pointer.  Return the new list of insns.  */
7321
7322static rtx
7323keep_stack_depressed (insns)
7324     rtx insns;
7325{
7326  int j;
7327  struct epi_info info;
7328  rtx insn, next;
7329
7330  /* If the epilogue is just a single instruction, it must be OK as is.  */
7331
7332  if (NEXT_INSN (insns) == NULL_RTX)
7333    return insns;
7334
7335  /* Otherwise, start a sequence, initialize the information we have, and
7336     process all the insns we were given.  */
7337  start_sequence ();
7338
7339  info.sp_equiv_reg = stack_pointer_rtx;
7340  info.sp_offset = 0;
7341  info.equiv_reg_src = 0;
7342
7343  insn = insns;
7344  next = NULL_RTX;
7345  while (insn != NULL_RTX)
7346    {
7347      next = NEXT_INSN (insn);
7348
7349      if (!INSN_P (insn))
7350	{
7351	  add_insn (insn);
7352	  insn = next;
7353	  continue;
7354	}
7355
7356      /* If this insn references the register that SP is equivalent to and
7357	 we have a pending load to that register, we must force out the load
7358	 first and then indicate we no longer know what SP's equivalent is.  */
7359      if (info.equiv_reg_src != 0
7360	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
7361	{
7362	  emit_equiv_load (&info);
7363	  info.sp_equiv_reg = 0;
7364	}
7365
7366      info.new_sp_equiv_reg = info.sp_equiv_reg;
7367      info.new_sp_offset = info.sp_offset;
7368
7369      /* If this is a (RETURN) and the return address is on the stack,
7370	 update the address and change to an indirect jump.  */
7371      if (GET_CODE (PATTERN (insn)) == RETURN
7372	  || (GET_CODE (PATTERN (insn)) == PARALLEL
7373	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
7374	{
7375	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
7376	  rtx base = 0;
7377	  HOST_WIDE_INT offset = 0;
7378	  rtx jump_insn, jump_set;
7379
7380	  /* If the return address is in a register, we can emit the insn
7381	     unchanged.  Otherwise, it must be a MEM and we see what the
7382	     base register and offset are.  In any case, we have to emit any
7383	     pending load to the equivalent reg of SP, if any.  */
7384	  if (GET_CODE (retaddr) == REG)
7385	    {
7386	      emit_equiv_load (&info);
7387	      add_insn (insn);
7388	      insn = next;
7389	      continue;
7390	    }
7391	  else if (GET_CODE (retaddr) == MEM
7392		   && GET_CODE (XEXP (retaddr, 0)) == REG)
7393	    base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
7394	  else if (GET_CODE (retaddr) == MEM
7395		   && GET_CODE (XEXP (retaddr, 0)) == PLUS
7396		   && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
7397		   && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
7398	    {
7399	      base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
7400	      offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
7401	    }
7402	  else
7403	    abort ();
7404
7405	  /* If the base of the location containing the return pointer
7406	     is SP, we must update it with the replacement address.  Otherwise,
7407	     just build the necessary MEM.  */
7408	  retaddr = plus_constant (base, offset);
7409	  if (base == stack_pointer_rtx)
7410	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
7411					    plus_constant (info.sp_equiv_reg,
7412							   info.sp_offset));
7413
7414	  retaddr = gen_rtx_MEM (Pmode, retaddr);
7415
7416	  /* If there is a pending load to the equivalent register for SP
7417	     and we reference that register, we must load our address into
7418	     a scratch register and then do that load.  */
7419	  if (info.equiv_reg_src
7420	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
7421	    {
7422	      unsigned int regno;
7423	      rtx reg;
7424
7425	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7426		if (HARD_REGNO_MODE_OK (regno, Pmode)
7427		    && !fixed_regs[regno]
7428		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
7429		    && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
7430					 regno)
7431		    && !refers_to_regno_p (regno,
7432					   regno + HARD_REGNO_NREGS (regno,
7433								     Pmode),
7434					   info.equiv_reg_src, NULL))
7435		  break;
7436
7437	      if (regno == FIRST_PSEUDO_REGISTER)
7438		abort ();
7439
7440	      reg = gen_rtx_REG (Pmode, regno);
7441	      emit_move_insn (reg, retaddr);
7442	      retaddr = reg;
7443	    }
7444
7445	  emit_equiv_load (&info);
7446	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
7447
7448	  /* Show the SET in the above insn is a RETURN.  */
7449	  jump_set = single_set (jump_insn);
7450	  if (jump_set == 0)
7451	    abort ();
7452	  else
7453	    SET_IS_RETURN_P (jump_set) = 1;
7454	}
7455
7456      /* If SP is not mentioned in the pattern and its equivalent register,
7457	 if any, is not modified, just emit the insn.  Otherwise, if neither is
7458	 set, replace each reference to SP with its current equivalent value
7459	 and emit the insn.  If none of those hold, handle each SET individually.  */
7460      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
7461	       && (info.sp_equiv_reg == stack_pointer_rtx
7462		   || !reg_set_p (info.sp_equiv_reg, insn)))
7463	add_insn (insn);
7464      else if (! reg_set_p (stack_pointer_rtx, insn)
7465	       && (info.sp_equiv_reg == stack_pointer_rtx
7466		   || !reg_set_p (info.sp_equiv_reg, insn)))
7467	{
7468	  if (! validate_replace_rtx (stack_pointer_rtx,
7469				      plus_constant (info.sp_equiv_reg,
7470						     info.sp_offset),
7471				      insn))
7472	    abort ();
7473
7474	  add_insn (insn);
7475	}
7476      else if (GET_CODE (PATTERN (insn)) == SET)
7477	handle_epilogue_set (PATTERN (insn), &info);
7478      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7479	{
7480	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
7481	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
7482	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
7483	}
7484      else
7485	add_insn (insn);
7486
7487      info.sp_equiv_reg = info.new_sp_equiv_reg;
7488      info.sp_offset = info.new_sp_offset;
7489
7490      insn = next;
7491    }
7492
7493  insns = get_insns ();
7494  end_sequence ();
7495  return insns;
7496}
7497
7498/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
7499   structure that contains information about what we've seen so far.  We
7500   process this SET by either updating that data or by emitting one or
7501   more insns.  */
7502
7503static void
7504handle_epilogue_set (set, p)
7505     rtx set;
7506     struct epi_info *p;
7507{
7508  /* First handle the case where we are setting SP.  Record what it is being
7509     set from.  If unknown, abort.  */
7510  if (reg_set_p (stack_pointer_rtx, set))
7511    {
7512      if (SET_DEST (set) != stack_pointer_rtx)
7513	abort ();
7514
7515      if (GET_CODE (SET_SRC (set)) == PLUS
7516	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
7517	{
7518	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
7519	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
7520	}
7521      else
7522	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
7523
7524      /* If we are adjusting SP, we adjust from the old data.  */
7525      if (p->new_sp_equiv_reg == stack_pointer_rtx)
7526	{
7527	  p->new_sp_equiv_reg = p->sp_equiv_reg;
7528	  p->new_sp_offset += p->sp_offset;
7529	}
7530
7531      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
7532	abort ();
7533
7534      return;
7535    }
7536
7537  /* Next handle the case where we are setting SP's equivalent register.
7538     If we already have a value to set it to, abort.  We could update, but
7539     there seems little point in handling that case.  Note that we have
7540     to allow for the case where we are setting the register set in
7541     the previous part of a PARALLEL inside a single insn.  But use the
7542     old offset for any updates within this insn.  */
7543  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
7544    {
7545      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
7546	  || p->equiv_reg_src != 0)
7547	abort ();
7548      else
7549	p->equiv_reg_src
7550	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7551				  plus_constant (p->sp_equiv_reg,
7552						 p->sp_offset));
7553    }
7554
7555  /* Otherwise, replace any references to SP in the insn with its new value
7556     and emit the insn.  */
7557  else
7558    {
7559      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
7560					    plus_constant (p->sp_equiv_reg,
7561							   p->sp_offset));
7562      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
7563					     plus_constant (p->sp_equiv_reg,
7564							    p->sp_offset));
7565      emit_insn (set);
7566    }
7567}
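
/* Illustration (editorial): successive SP adjustments accumulate.
   Given

       (set (reg sp) (plus (reg sp) (const_int 8)))
       (set (reg sp) (plus (reg sp) (const_int 4)))

   the first SET records sp_offset = 8; in the second, the source is
   again SP-relative, so the "adjusting SP" case above folds in the
   old data, leaving sp_equiv_reg = sp and sp_offset = 12 with no insn
   emitted for either SET.  */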
7568
7569/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */
7570
7571static void
7572emit_equiv_load (p)
7573     struct epi_info *p;
7574{
7575  if (p->equiv_reg_src != 0)
7576    emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
7577
7578  p->equiv_reg_src = 0;
7579}
7580#endif
7581
7582/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
7583   this into place with notes indicating where the prologue ends and where
7584   the epilogue begins.  Update the basic block information when possible.  */
7585
7586void
7587thread_prologue_and_epilogue_insns (f)
7588     rtx f ATTRIBUTE_UNUSED;
7589{
7590  int inserted = 0;
7591  edge e;
7592#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7593  rtx seq;
7594#endif
7595#ifdef HAVE_prologue
7596  rtx prologue_end = NULL_RTX;
7597#endif
7598#if defined (HAVE_epilogue) || defined(HAVE_return)
7599  rtx epilogue_end = NULL_RTX;
7600#endif
7601
7602#ifdef HAVE_prologue
7603  if (HAVE_prologue)
7604    {
7605      start_sequence ();
7606      seq = gen_prologue ();
7607      emit_insn (seq);
7608
7609      /* Retain a map of the prologue insns.  */
7610      record_insns (seq, &prologue);
7611      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7612
7613      seq = get_insns ();
7614      end_sequence ();
7615
7616      /* Can't deal with multiple successors of the entry block
7617         at the moment.  Function should always have at least one
7618         entry point.  */
7619      if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7620	abort ();
7621
7622      insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7623      inserted = 1;
7624    }
7625#endif
7626
7627  /* If the exit block has no non-fake predecessors, we don't need
7628     an epilogue.  */
7629  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7630    if ((e->flags & EDGE_FAKE) == 0)
7631      break;
7632  if (e == NULL)
7633    goto epilogue_done;
7634
7635#ifdef HAVE_return
7636  if (optimize && HAVE_return)
7637    {
7638      /* If we're allowed to generate a simple return instruction,
7639	 then by definition we don't need a full epilogue.  Examine
7640	 the block that falls through to EXIT.   If it does not
7641	 contain any code, examine its predecessors and try to
7642	 emit (conditional) return instructions.  */
7643
7644      basic_block last;
7645      edge e_next;
7646      rtx label;
7647
7648      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7649	if (e->flags & EDGE_FALLTHRU)
7650	  break;
7651      if (e == NULL)
7652	goto epilogue_done;
7653      last = e->src;
7654
7655      /* Verify that there are no active instructions in the last block.  */
7656      label = last->end;
7657      while (label && GET_CODE (label) != CODE_LABEL)
7658	{
7659	  if (active_insn_p (label))
7660	    break;
7661	  label = PREV_INSN (label);
7662	}
7663
7664      if (last->head == label && GET_CODE (label) == CODE_LABEL)
7665	{
7666	  rtx epilogue_line_note = NULL_RTX;
7667
7668	  /* Locate the line number associated with the closing brace,
7669	     if we can find one.  */
7670	  for (seq = get_last_insn ();
7671	       seq && ! active_insn_p (seq);
7672	       seq = PREV_INSN (seq))
7673	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7674	      {
7675		epilogue_line_note = seq;
7676		break;
7677	      }
7678
7679	  for (e = last->pred; e; e = e_next)
7680	    {
7681	      basic_block bb = e->src;
7682	      rtx jump;
7683
7684	      e_next = e->pred_next;
7685	      if (bb == ENTRY_BLOCK_PTR)
7686		continue;
7687
7688	      jump = bb->end;
7689	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7690		continue;
7691
7692	      /* If we have an unconditional jump, we can replace that
7693		 with a simple return instruction.  */
7694	      if (simplejump_p (jump))
7695		{
7696		  emit_return_into_block (bb, epilogue_line_note);
7697		  delete_insn (jump);
7698		}
7699
7700	      /* If we have a conditional jump, we can try to replace
7701		 that with a conditional return instruction.  */
7702	      else if (condjump_p (jump))
7703		{
7704		  if (! redirect_jump (jump, 0, 0))
7705		    continue;
7706
7707		  /* If this block has only one successor, it both jumps
7708		     and falls through to the fallthru block, so we can't
7709		     delete the edge.  */
7710		  if (bb->succ->succ_next == NULL)
7711		    continue;
7712		}
7713	      else
7714		continue;
7715
7716	      /* Fix up the CFG for the successful change we just made.  */
7717	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
7718	    }
7719
7720	  /* Emit a return insn for the exit fallthru block.  Whether
7721	     this is still reachable will be determined later.  */
7722
7723	  emit_barrier_after (last->end);
7724	  emit_return_into_block (last, epilogue_line_note);
7725	  epilogue_end = last->end;
7726	  last->succ->flags &= ~EDGE_FALLTHRU;
7727	  goto epilogue_done;
7728	}
7729    }
7730#endif
7731#ifdef HAVE_epilogue
7732  if (HAVE_epilogue)
7733    {
7734      /* Find the edge that falls through to EXIT.  Other edges may exist
7735	 due to RETURN instructions, but those don't need epilogues.
7736	 There really shouldn't be a mixture -- either all should have
7737	 been converted or none, however...  */
7738
7739      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7740	if (e->flags & EDGE_FALLTHRU)
7741	  break;
7742      if (e == NULL)
7743	goto epilogue_done;
7744
7745      start_sequence ();
7746      epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7747
7748      seq = gen_epilogue ();
7749
7750#ifdef INCOMING_RETURN_ADDR_RTX
7751      /* If this function returns with the stack depressed and we can support
7752	 it, massage the epilogue to actually do that.  */
7753      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7754	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7755	seq = keep_stack_depressed (seq);
7756#endif
7757
7758      emit_jump_insn (seq);
7759
7760      /* Retain a map of the epilogue insns.  */
7761      record_insns (seq, &epilogue);
7762
7763      seq = get_insns ();
7764      end_sequence ();
7765
7766      insert_insn_on_edge (seq, e);
7767      inserted = 1;
7768    }
7769#endif
7770epilogue_done:
7771
7772  if (inserted)
7773    commit_edge_insertions ();
7774
7775#ifdef HAVE_sibcall_epilogue
7776  /* Emit sibling epilogues before any sibling call sites.  */
7777  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7778    {
7779      basic_block bb = e->src;
7780      rtx insn = bb->end;
7781      rtx i;
7782      rtx newinsn;
7783
7784      if (GET_CODE (insn) != CALL_INSN
7785	  || ! SIBLING_CALL_P (insn))
7786	continue;
7787
7788      start_sequence ();
7789      emit_insn (gen_sibcall_epilogue ());
7790      seq = get_insns ();
7791      end_sequence ();
7792
7793      /* Retain a map of the epilogue insns.  Used in life analysis to
7794	 avoid getting rid of sibcall epilogue insns.  Do this before we
7795	 actually emit the sequence.  */
7796      record_insns (seq, &sibcall_epilogue);
7797
7798      i = PREV_INSN (insn);
7799      newinsn = emit_insn_before (seq, insn);
7800    }
7801#endif
7802
7803#ifdef HAVE_prologue
7804  if (prologue_end)
7805    {
7806      rtx insn, prev;
7807
7808      /* GDB handles `break f' by setting a breakpoint on the first
7809	 line note after the prologue.  Which means (1) that if
7810	 there are line number notes before where we inserted the
7811	 prologue we should move them, and (2) we should generate a
7812	 note before the end of the first basic block, if there isn't
7813	 one already there.
7814
7815	 ??? This behavior is completely broken when dealing with
7816	 multiple entry functions.  We simply always place the note
7817	 in the first basic block and let alternate entry points
7818	 be missed.
7819       */
7820
7821      for (insn = prologue_end; insn; insn = prev)
7822	{
7823	  prev = PREV_INSN (insn);
7824	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7825	    {
7826	      /* Note that we cannot reorder the first insn in the
7827		 chain, since rest_of_compilation relies on that
7828		 remaining constant.  */
7829	      if (prev == NULL)
7830		break;
7831	      reorder_insns (insn, insn, prologue_end);
7832	    }
7833	}
7834
7835      /* Find the last line number note in the first block.  */
7836      for (insn = ENTRY_BLOCK_PTR->next_bb->end;
7837	   insn != prologue_end && insn;
7838	   insn = PREV_INSN (insn))
7839	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7840	  break;
7841
7842      /* If we didn't find one, make a copy of the first line number
7843	 we run across.  */
7844      if (! insn)
7845	{
7846	  for (insn = next_active_insn (prologue_end);
7847	       insn;
7848	       insn = PREV_INSN (insn))
7849	    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7850	      {
7851		emit_line_note_after (NOTE_SOURCE_FILE (insn),
7852				      NOTE_LINE_NUMBER (insn),
7853				      prologue_end);
7854		break;
7855	      }
7856	}
7857    }
7858#endif
7859#ifdef HAVE_epilogue
7860  if (epilogue_end)
7861    {
7862      rtx insn, next;
7863
7864      /* Similarly, move any line notes that appear after the epilogue.
7865         There is no need, however, to be quite so anal about the existence
7866	 of such a note.  */
7867      for (insn = epilogue_end; insn; insn = next)
7868	{
7869	  next = NEXT_INSN (insn);
7870	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7871	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7872	}
7873    }
7874#endif
7875}
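
/* Target-side sketch (editorial; the pattern names are the standard
   ones, but the body shown is hypothetical machine-description code,
   not C): HAVE_prologue, HAVE_epilogue, HAVE_return and
   HAVE_sibcall_epilogue are defined when the target's .md file
   provides the corresponding expanders, e.g.

       (define_expand "prologue"
	 [(const_int 0)]
	 ""
	 "expand_target_prologue (); DONE;")

   gen_prologue () above simply invokes such an expander.  */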
7876
7877/* Reposition the prologue-end and epilogue-begin notes after instruction
7878   scheduling and delayed branch scheduling.  */
7879
7880void
7881reposition_prologue_and_epilogue_notes (f)
7882     rtx f ATTRIBUTE_UNUSED;
7883{
7884#if defined (HAVE_prologue) || defined (HAVE_epilogue)
7885  rtx insn, last, note;
7886  int len;
7887
7888  if ((len = VARRAY_SIZE (prologue)) > 0)
7889    {
7890      last = 0, note = 0;
7891
7892      /* Scan from the beginning until we reach the last prologue insn.
7893	 We apparently can't depend on basic_block_{head,end} after
7894	 reorg has run.  */
7895      for (insn = f; insn; insn = NEXT_INSN (insn))
7896	{
7897	  if (GET_CODE (insn) == NOTE)
7898	    {
7899	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7900		note = insn;
7901	    }
7902	  else if (contains (insn, prologue))
7903	    {
7904	      last = insn;
7905	      if (--len == 0)
7906		break;
7907	    }
7908	}
7909
7910      if (last)
7911	{
7912	  rtx next;
7913
7914	  /* Find the prologue-end note if we haven't already, and
7915	     move it to just after the last prologue insn.  */
7916	  if (note == 0)
7917	    {
7918	      for (note = last; (note = NEXT_INSN (note));)
7919		if (GET_CODE (note) == NOTE
7920		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7921		  break;
7922	    }
7923
7924	  next = NEXT_INSN (note);
7925
7926	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
7927	  if (GET_CODE (last) == CODE_LABEL)
7928	    last = NEXT_INSN (last);
7929	  reorder_insns (note, note, last);
7930	}
7931    }
7932
7933  if ((len = VARRAY_SIZE (epilogue)) > 0)
7934    {
7935      last = 0, note = 0;
7936
7937      /* Scan from the end until we reach the first epilogue insn.
7938	 We apparently can't depend on basic_block_{head,end} after
7939	 reorg has run.  */
7940      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7941	{
7942	  if (GET_CODE (insn) == NOTE)
7943	    {
7944	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7945		note = insn;
7946	    }
7947	  else if (contains (insn, epilogue))
7948	    {
7949	      last = insn;
7950	      if (--len == 0)
7951		break;
7952	    }
7953	}
7954
7955      if (last)
7956	{
7957	  /* Find the epilogue-begin note if we haven't already, and
7958	     move it to just before the first epilogue insn.  */
7959	  if (note == 0)
7960	    {
7961	      for (note = insn; (note = PREV_INSN (note));)
7962		if (GET_CODE (note) == NOTE
7963		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7964		  break;
7965	    }
7966
7967	  if (PREV_INSN (last) != note)
7968	    reorder_insns (note, note, PREV_INSN (last));
7969	}
7970    }
7971#endif /* HAVE_prologue or HAVE_epilogue */
7972}
7973
7974/* Called once, at initialization, to initialize function.c.  */
7975
7976void
7977init_function_once ()
7978{
7979  VARRAY_INT_INIT (prologue, 0, "prologue");
7980  VARRAY_INT_INIT (epilogue, 0, "epilogue");
7981  VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7982}
7983
7984#include "gt-function.h"
7985