/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"
#include "hash.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases, use the macro NAME__MAIN to give a quoted symbol and the
   macro SYMBOL__MAIN to give the same symbol without quotes for an
   alternative entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
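
/* For example, with ALIGN == 8: CEIL_ROUND (13, 8) == 16 and
   FLOOR_ROUND (13, 8) == 8, while FLOOR_ROUND (-13, 8) == -16; a naive
   (-13 / 8) * 8 would instead yield -8 where division truncates toward
   zero, which is why these macros use masking.  */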

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */

int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */

int current_function_uses_only_leaf_regs;

/* Nonzero if the function being compiled issues a computed jump.  */

int current_function_has_computed_jump;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
   for nonlocal gotos.  There is one for every nonlocal label in the function;
   this list matches the one in nonlocal_labels.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slots;

/* List (chain of EXPR_LIST) of labels heading the current handlers for
   nonlocal gotos.  */

rtx nonlocal_goto_handler_labels;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not
   optimizing.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if not
   optimizing.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
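
/* As a hypothetical illustration of the level mechanism: expanding a
   statement `s = f ();', where f returns a structure in memory, allocates
   a slot for the return value at the current temp_slot_level, and
   free_temp_slots releases it once the statement is done.  Inside a
   `({ ... })' grouping, however, that slot may hold the grouping's result,
   so preserve_temp_slots moves it up one level and it survives the pop.  */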

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  int alias_set;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

struct insns_for_mem_entry {
  /* The KEY in HE will be a MEM.  */
  struct hash_entry he;
  /* These are the INSNS which reference the MEM.  */
  rtx insns;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
					    int, struct function *));
static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
					      int, tree));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack	PROTO((struct function *, rtx, tree,
				       enum machine_mode, enum machine_mode,
				       int, int, int,
				       struct hash_table *));
static void fixup_var_refs	PROTO((rtx, enum machine_mode, int,
				       struct hash_table *));
static struct fixup_replacement
  *find_fixup_replacement	PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int, struct hash_table *));
static void fixup_var_refs_1	PROTO((rtx, enum machine_mode, rtx *, rtx,
				       struct fixup_replacement **));
static rtx fixup_memory_subreg	PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1	PROTO((rtx, rtx));
static void optimize_bit_field	PROTO((rtx, rtx, rtx *));
static void instantiate_decls	PROTO((tree, int));
static void instantiate_decls_1	PROTO((tree, int));
static void instantiate_decl	PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers	PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below		PROTO((struct args_size *, enum machine_mode,
				       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down		PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse	PROTO((tree));
static int all_blocks		PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns	PROTO((rtx));
static int contains		PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
static boolean purge_addressof_1 PROTO((rtx *, rtx, int, int,
					struct hash_table *));
static int is_addressof		PROTO ((rtx *, void *));
static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
						       struct hash_table *,
						       hash_table_key));
static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
static int insns_for_mem_walk   PROTO ((rtx *, void *));
static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));


/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->has_computed_jump = current_function_has_computed_jump;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
  p->nonlocal_goto_handler_labels = nonlocal_goto_handler_labels;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_has_computed_jump = p->has_computed_jump;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
  nonlocal_goto_handler_labels = p->nonlocal_goto_handler_labels;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode,
		    queue->unsignedp, 0);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
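
/* A minimal usage sketch (hypothetical caller): to obtain a word-sized
   scratch slot aligned according to its mode, one would write

	rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   and receive a MEM whose address is expressed in terms of
   virtual_stack_vars_rtx until virtual registers are instantiated.  */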

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      tree type;

      alignment = GET_MODE_ALIGNMENT (mode);
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

  addr = plus_constant (virtual_stack_vars_rtx,
			function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).

   TYPE is the type that will be used for the stack slot.  */

static rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  int align;
  int alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  align = GET_MODE_ALIGNMENT (mode);
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;

  if (! type)
    type = type_for_mode (mode, 0);
  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& (!flag_strict_aliasing
	    || (alias_set && p->alias_set == alias_set))
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align)))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode
	  /* We can't split slots if -fstrict-aliasing because the
	     information about the alias set for the new slot will be
	     lost.  */
	  && !flag_strict_aliasing)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
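
/* For instance (a hypothetical sketch), expanding a call that returns a
   structure in memory might grab a BLKmode temporary with

	rtx temp = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);

   where KEEP == 0 lets free_temp_slots reclaim the slot at the end of the
   enclosing statement.  */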

/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
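
/* A hypothetical sketch of this type-directed entry point: for a small
   scalar, assign_temp (integer_type_node, 0, 0, 0) normally yields a pseudo
   register (possibly promoted to a wider mode), whereas passing
   MEMORY_REQUIRED == 1 forces an addressable stack slot instead.  */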

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     expensive optimizations are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
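
/* Concretely (hypothetical numbers): if free BLKmode slot P has
   base_offset 16 and full_size 8, and free BLKmode slot Q has
   base_offset 24 and full_size 8, then P->base_offset + P->full_size
   == Q->base_offset, so Q is merged into P, leaving one free slot of
   full_size 16 at offset 16.  */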

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      {
	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
	   needs to be preserved.  This can happen if a temporary in
	   the RTL_EXPR was addressed; preserve_temp_slots will move
	   the temporary into a higher level.  */
	if (temp_slot_level <= p->level)
	  p->in_use = 0;
	else
	  p->rtl_expr = NULL_TREE;
      }

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
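
/* A sketch of the typical calling pattern around statement expansion
   (hypothetical caller, not a fixed contract):

	push_temp_slots ();
	... expand the statement ...
	preserve_temp_slots (result);	/-* only if the value must survive *-/
	free_temp_slots ();
	pop_temp_slots ();

   so temporaries made for the statement become reusable as soon as the
   level is popped.  (The inner comment delimiters are escaped here since
   this example lives inside a comment.)  */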

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			    0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0,
			  0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);
1648
1649      /* The two parts are in memory order already.
1650	 Use the lower parts address as ours.  */
1651      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1652      /* Prevent sharing of rtl that might lose.  */
1653      if (GET_CODE (XEXP (reg, 0)) == PLUS)
1654	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1655    }
1656  else
1657    return;
1658
1659  if (current_function_check_memory_usage)
1660    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1661		       XEXP (reg, 0), Pmode,
1662		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1663		       TYPE_MODE (sizetype),
1664		       GEN_INT (MEMORY_USE_RW),
1665		       TYPE_MODE (integer_type_node));
1666}
1667
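
/* Illustration (a sketch of the typical trigger, not compiler code):
   for C source such as

	int i;
	int *p = &i;

   the front end may discover that the address of I is needed only after
   I has already been given a pseudo register; put_var_into_stack is then
   called retroactively, and either wraps the pseudo in an ADDRESSOF
   (when can_use_addressof holds) or moves it to a real stack slot via
   put_reg_into_stack.  */
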
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   TYPE is the user-level data type of the variable.
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   ORIGINAL_REGNO, if nonzero, is used instead of REGNO (REG) when looking
   for a parameter's pre-assigned stack slot.
   USED_P is nonzero if this reg might have already been used in an insn.
   HT, if nonzero, records which insns mention each pseudo; it is passed
   on to speed up the fixup of references.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p, ht)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
     struct hash_table *ht;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
	new = function->parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (regno < max_parm_reg)
	new = parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* The `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
		       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on the saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
}
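
/* Illustration (a sketch, not compiler code): the rewrite above works
   in place.  Given a pseudo such as

	(reg:SI 57)

   PUT_MODE, XEXP and PUT_CODE overwrite that very rtx object so it
   becomes, e.g.,

	(mem:SI (plus:SI (reg:SI frame-pointer) (const_int -16)))

   Because rtl sharing means every insn that mentioned pseudo 57 points
   at this same object, all existing references switch to the stack slot
   at once; fixup_var_refs then only has to repair insns that the new
   MEM makes invalid.  */
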
/* Fix up all references to VAR, a MEM that was until now a pseudo register
   with mode PROMOTED_MODE.  UNSIGNEDP is nonzero if VAR was promoted from
   an unsigned type.  If HT is nonzero it records every insn that mentions
   VAR, and only those insns are scanned; otherwise scan the main insn
   chain, all pending sequences, waiting RTL_EXPRs, and the catch clauses.  */

static void
fixup_var_refs (var, promoted_mode, unsignedp, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     struct hash_table *ht;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
			stack == 0, ht);
  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    return;

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0, 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
				0);
	  end_sequence ();
	}
    }

  /* Scan the catch clauses for exception handling too.  */
  push_to_sequence (catch_clauses);
  fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
			0, 0);
  end_sequence ();
}

/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
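
/* Illustration (a sketch of the calling convention, not compiler code):
   callers test the NEW field to learn whether a replacement was already
   chosen for X earlier in the same insn:

	struct fixup_replacement *r;

	r = find_fixup_replacement (&replacements, x);
	if (r->new == 0)
	  r->new = gen_reg_rtx (GET_MODE (x));	/+ first sight of X +/
	*loc = r->new;				/+ every dup gets the same rtx +/

   Reusing one rtx for every duplicate of X is what keeps MATCH_DUP
   operand constraints satisfied after the fixup.  */
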
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  HT, if nonzero,
   lists the insns that mention VAR; in that case only those insns
   are scanned.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
     struct hash_table *ht;
{
  rtx call_dest = 0;
  rtx insn_list = NULL_RTX;

  /* If we already know which INSNs reference VAR there's no need
     to walk the entire instruction chain.  */
  if (ht)
    {
      insn_list = ((struct insns_for_mem_entry *)
		   hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
      if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* If this is a CLOBBER of VAR, delete it.

	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
	     and REG_RETVAL notes too.  */
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && (XEXP (PATTERN (insn), 0) == var
		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
	    {
	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
		/* The REG_LIBCALL note will go away since we are going to
		   turn INSN into a NOTE, so just delete the
		   corresponding REG_RETVAL note.  */
		remove_note (XEXP (note, 0),
			     find_reg_note (XEXP (note, 0), REG_RETVAL,
					    NULL_RTX));

	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	    }

	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.
	     Similarly if this is storing VAR from a register from which
	     it was loaded in the previous insn.  This will occur
	     when an ADDRESSOF was made for an arglist slot.  */
	  else if (toplevel
		   && (set = single_set (insn)) != 0
		   && SET_DEST (set) == var
		   /* If this represents the result of an insn group,
		      don't delete the insn.  */
		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
		   && (rtx_equal_p (SET_SRC (set), var)
		       || (GET_CODE (SET_SRC (set)) == REG
			   && (prev = prev_nonnote_insn (insn)) != 0
			   && (prev_set = single_set (prev)) != 0
			   && SET_DEST (prev_set) == SET_SRC (set)
			   && rtx_equal_p (SET_SRC (prev_set), var))))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

	      if (SMALL_REGISTER_CLASSES)
		{
		  /* If the insn that copies the results of a CALL_INSN
		     into a pseudo now references VAR, we have to use an
		     intermediate pseudo since we want the life of the
		     return value register to be only a single insn.

		     If we don't use an intermediate pseudo, such things as
		     address computations to make the address of VAR valid,
		     if it is not, can be placed between the CALL_INSN and INSN.

		     To make sure this doesn't happen, we record the destination
		     of the CALL_INSN and see if the next insn uses both that
		     and VAR.  */

		  if (call_dest != 0 && GET_CODE (insn) == INSN
		      && reg_mentioned_p (var, PATTERN (insn))
		      && reg_mentioned_p (call_dest, PATTERN (insn)))
		    {
		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		      emit_insn_before (gen_move_insn (temp, call_dest), insn);

		      PATTERN (insn) = replace_rtx (PATTERN (insn),
						    call_dest, temp);
		    }

		  if (GET_CODE (insn) == CALL_INSN
		      && GET_CODE (PATTERN (insn)) == SET)
		    call_dest = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (insn) == CALL_INSN
			   && GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    call_dest = 0;
		}

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0)
		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	}

      if (!ht)
	insn = next;
      else if (insn_list)
	{
	  insn = XEXP (insn_list, 0);
	  insn_list = XEXP (insn_list, 1);
	}
      else
	insn = NULL_RTX;
    }
}
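
/* Illustration (a sketch, not compiler code): one case the loop above
   deletes is the now-useless copy of VAR to itself.  After VAR has been
   turned into a MEM, an insn that once loaded VAR from its arglist home,
   such as

	(set (mem:SI (plus (reg ap) (const_int 8)))	;; VAR
	     (mem:SI (plus (reg ap) (const_int 8))))	;; also VAR

   copies VAR onto itself, so it is turned into a NOTE_INSN_DELETED note
   rather than being unlinked outright, which would disturb insn chains
   held elsewhere.  */
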
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we don't take care of replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = gen_sequence ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */

		  start_sequence ();
		  force_operand (sub, y);
		  seq = gen_sequence ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
		  if (wanted_mode == VOIDmode)
		    wanted_mode = word_mode;
		}
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx_MEM (wanted_mode,
					plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_COPY_ATTRIBUTES (newmem, tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      enum machine_mode mode = GET_MODE (x);
	      *loc = replacement->new;

	      /* Careful!  We may have just replaced a SUBREG by a MEM, which
		 means that the insn may have become invalid again.  We can't
		 in this case make a new replacement since we already have one
		 and we must deal with MATCH_DUPs.  */
	      if (GET_CODE (replacement->new) == MEM)
		{
		  INSN_CODE (insn) = -1;
		  if (recog_memoized (insn) >= 0)
		    return;

		  fixup_var_refs_1 (replacement->new, mode, &PATTERN (insn),
				    insn, replacements);
		}

	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
#ifdef HAVE_insv
	rtx outerdest = dest;
#endif

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 0);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode;
		enum machine_mode is_mode = GET_MODE (tem);
		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
		if (wanted_mode == VOIDmode)
		  wanted_mode = word_mode;

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx_MEM (wanted_mode,
					  plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_COPY_ATTRIBUTES (newmem, tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (outerdest, 0)
		       will be restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no function other than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* A STRICT_LOW_PART around a MEM can be discarded.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
		promoted_mode = GET_MODE (fixeddest);
	      }
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (promoted_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
      /* Fall through.  */

    default:
      break;
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
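
/* Illustration (a sketch, not compiler code): the SUBREG case above
   unwinds promotion.  If VAR was an 8-bit variable promoted to a 32-bit
   pseudo, references to it appear as

	(subreg:QI (reg:SI 57) 0)	with SUBREG_PROMOTED_VAR_P set

   and once the pseudo has been transmuted into a MEM, the SUBREG is
   simply replaced by VAR itself, and the fixup reruns with VAR's
   current (narrow) mode as the "promoted" mode.  */
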
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside REG_NOTES.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

  if (BYTES_BIG_ENDIAN)
    offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
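
/* Illustration (a worked instance, not compiler code): assuming a
   32-bit target where UNITS_PER_WORD == 4 and BYTES_BIG_ENDIAN == 0,

	(subreg:SI (mem:DI (reg:SI 60)) 1)

   has SUBREG_WORD == 1, so offset = 1 * 4 = 4 and the result is

	(mem:SI (plus:SI (reg:SI 60) (const_int 4)))

   i.e. the high-order word of the DImode memory.  On a big-endian
   target the MIN-based correction above re-aims the address at the
   part of the wider object that the SUBREG actually names.  */
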
/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute the new addresses, put them
   before INSN.

   UNCRITICAL is as in fixup_memory_subreg.  */

static rtx
walk_fixup_memory_subreg (x, insn, uncritical)
     register rtx x;
     rtx insn;
     int uncritical;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, uncritical);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j)
	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
	}
    }
  return x;
}

/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
#endif
	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
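
/* Illustration (a sketch, not compiler code): given a frame reference
   whose displacement the machine cannot encode directly, e.g.

	(mem:SI (plus:SI (reg fp) (const_int 100000)))

   fixup_stack_1 emits, before the insn being fixed,

	(set (reg:SI 90) (plus:SI (reg fp) (const_int 100000)))

   and rewrites the reference as (mem:SI (reg:SI 90)).  Register 90 here
   is just a fresh pseudo chosen for the example.  */
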
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
	  rtx insns;

	  /* Adjust OFFSET to count bits from low-address byte.  */
	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		      - offset - INTVAL (XEXP (bitfield, 1)));

	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		offset -= (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			   - MIN (UNITS_PER_WORD,
				  GET_MODE_SIZE (GET_MODE (memref))));
	    }

	  start_sequence ();
	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));
	  insns = get_insns ();
	  end_sequence ();
	  emit_insns_before (insns, insn);

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0
		     && (GET_MODE_CLASS (GET_MODE (dest))
			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
		     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
			 <= UNITS_PER_WORD))
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
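
/* Illustration (a worked instance, not compiler code): assuming bits
   and bytes are numbered the same way and QImode is the 8-bit mode, a
   byte-aligned extraction such as

	(zero_extract:SI (mem:SI (reg:SI 60)) (const_int 8) (const_int 8))

   names a field 8 bits wide at bit position 8, so mode_for_size picks
   QImode and the position becomes byte offset 1; the field reference is
   then replaced by the plain memory reference

	(mem:QI (plus:SI (reg:SI 60) (const_int 1)))

   with a separate conversion insn emitted first when the other side of
   the SET is in a wider mode.  */
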
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET	0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather is part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif

/* On a few machines, the CFA coincides with the arg pointer.  */

#ifndef ARG_POINTER_CFA_OFFSET
#define ARG_POINTER_CFA_OFFSET 0
#endif

/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
   its address taken.  DECL is the decl for the object stored in the
   register, for later use if we do need to force REG into the stack.
   REG is overwritten by the MEM like in put_reg_into_stack.  */

rtx
gen_mem_addressof (reg, decl)
     rtx reg;
     tree decl;
{
  tree type = TREE_TYPE (decl);
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
  SET_ADDRESSOF_DECL (r, decl);
  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);

  XEXP (reg, 0) = r;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, DECL_MODE (decl));
  MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
  MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
  MEM_ALIAS_SET (reg) = get_alias_set (decl);

  if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
    fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);

  return reg;
}
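
/* Illustration (a sketch, not compiler code): if DECL's pseudo was
   (reg:SI 57), gen_mem_addressof rewrites that rtx in place into,
   assuming a 32-bit Pmode,

	(mem:SI (addressof:SI (reg:SI 58) 57))

   where 58 is a fresh pseudo and 57 is remembered as ADDRESSOF_REGNO.
   If the optimizers never need a real address, the ADDRESSOF can later
   be dissolved back into a plain register; otherwise
   put_addressof_into_stack gives it a genuine stack slot.  */
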
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */

void
flush_addressof (decl)
     tree decl;
{
  if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
      && DECL_RTL (decl) != 0
      && GET_CODE (DECL_RTL (decl)) == MEM
      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
      && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
    put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
}

/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */

static void
put_addressof_into_stack (r, ht)
     rtx r;
     struct hash_table *ht;
{
  tree decl = ADDRESSOF_DECL (r);
  rtx reg = XEXP (r, 0);

  if (GET_CODE (reg) != REG)
    abort ();

  put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
		      DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
		      ADDRESSOF_REGNO (r),
		      TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
}

/* List of replacements made below in purge_addressof_1 when creating
   bitfield insertions.  */
static rtx purge_bitfield_addressof_replacements;

/* List of replacements made below in purge_addressof_1 for patterns
   (MEM (ADDRESSOF (REG ...))).  The key of each list entry is the
   corresponding (ADDRESSOF (REG ...)) and the value is a substitution
   for the whole pattern.  The PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS
   list alone is not enough in complex cases, e.g. when some field
   values can be extracted by using a MEM with a narrower mode.  */
static rtx purge_addressof_replacements;
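
/* Illustration (a sketch of the list shape, not compiler code): both
   lists are chains of nested EXPR_LISTs,

	(expr_list KEY (expr_list VALUE (expr_list KEY2 ...)))

   so walking a list means taking XEXP (XEXP (tem, 1), 1) to step from
   one KEY/VALUE pair to the next, as the loops below do.  */
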
3073/* Helper function for purge_addressof.  See if the rtx expression at *LOC
3074   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
3075   the stack.  If the function returns FALSE then the replacement could not
3076   be made.  */
3077
3078static boolean
3079purge_addressof_1 (loc, insn, force, store, ht)
3080     rtx *loc;
3081     rtx insn;
3082     int force, store;
3083     struct hash_table *ht;
3084{
3085  rtx x;
3086  RTX_CODE code;
3087  int i, j;
3088  char *fmt;
3089  boolean result = true;
3090
3091  /* Re-start here to avoid recursion in common cases.  */
3092 restart:
3093
3094  x = *loc;
3095  if (x == 0)
3096    return true;
3097
3098  code = GET_CODE (x);
3099
3100  if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
3101    {
3102      rtx insns;
3103      /* We must create a copy of the rtx because it was created by
3104	 overwriting a REG rtx which is always shared.  */
3105      rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3106
3107      if (validate_change (insn, loc, sub, 0)
3108	  || validate_replace_rtx (x, sub, insn))
3109	return true;
3110
3111      start_sequence ();
3112      sub = force_operand (sub, NULL_RTX);
3113      if (! validate_change (insn, loc, sub, 0)
3114	  && ! validate_replace_rtx (x, sub, insn))
3115	abort ();
3116
3117      insns = gen_sequence ();
3118      end_sequence ();
3119      emit_insn_before (insns, insn);
3120      return true;
3121    }
3122  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3123    {
3124      rtx sub = XEXP (XEXP (x, 0), 0);
3125      rtx sub2;
3126
3127      if (GET_CODE (sub) == MEM)
3128	{
3129	  sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3130	  MEM_COPY_ATTRIBUTES (sub2, sub);
3131	  RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
3132	  sub = sub2;
3133	}
3134
3135      if (GET_CODE (sub) == REG
3136	  && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3137	{
3138	  put_addressof_into_stack (XEXP (x, 0), ht);
3139	  return true;
3140	}
3141      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3142	{
3143	  int size_x, size_sub;
3144
3145	  if (!insn)
3146	    {
3147	      /* When processing REG_NOTES look at the list of
3148		 replacements done on the insn to find the register that X
3149		 was replaced by.  */
3150	      rtx tem;
3151
3152	      for (tem = purge_bitfield_addressof_replacements;
3153		   tem != NULL_RTX;
3154		   tem = XEXP (XEXP (tem, 1), 1))
3155		if (rtx_equal_p (x, XEXP (tem, 0)))
3156		  {
3157		    *loc = XEXP (XEXP (tem, 1), 0);
3158		    return true;
3159		  }
3160
3161	      /* See comment for purge_addressof_replacements. */
3162	      for (tem = purge_addressof_replacements;
3163		   tem != NULL_RTX;
3164		   tem = XEXP (XEXP (tem, 1), 1))
3165		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3166		  {
3167		    rtx z = XEXP (XEXP (tem, 1), 0);
3168
3169		    if (GET_MODE (x) == GET_MODE (z)
3170			|| (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3171			    && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3172		      abort ();
3173
3174		    /* It can happen that the note may speak of things
3175		       in a wider (or just different) mode than the
3176		       code did.  This is especially true of
3177		       REG_RETVAL. */
3178
3179		    if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3180		      z = SUBREG_REG (z);
3181
3182		    if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3183			&& (GET_MODE_SIZE (GET_MODE (x))
3184			    > GET_MODE_SIZE (GET_MODE (z))))
3185		      {
3186			/* This can occur as a result in invalid
3187			   pointer casts, e.g. float f; ...
3188			   *(long long int *)&f.
3189			   ??? We could emit a warning here, but
3190			   without a line number that wouldn't be
3191			   very helpful.  */
3192			z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3193		      }
3194		    else
3195		      z = gen_lowpart (GET_MODE (x), z);
3196
3197		    *loc = z;
3198		    return true;
3199		  }
3200
3201	      /* Sometimes we may not be able to find the replacement.  For
3202		 example when the original insn was a MEM in a wider mode,
3203		 and the note is part of a sign extension of a narrowed
3204		 version of that MEM.  Gcc testcase compile/990829-1.c can
3205		 generate an example of this siutation.  Rather than complain
3206		 we return false, which will prompt our caller to remove the
3207		 offending note.  */
3208	      return false;
3209	    }
3210
3211	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
3212	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3213
3214	  /* Don't even consider working with paradoxical subregs,
3215	     or the moral equivalent seen here.  */
3216	  if (size_x <= size_sub
3217	      && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3218	    {
3219	      /* Do a bitfield insertion to mirror what would happen
3220		 in memory.  */
3221
3222	      rtx val, seq;
3223
3224	      if (store)
3225		{
3226		  rtx p = PREV_INSN (insn);
3227
3228		  start_sequence ();
3229		  val = gen_reg_rtx (GET_MODE (x));
3230		  if (! validate_change (insn, loc, val, 0))
3231		    {
3232		      /* Discard the current sequence and put the
3233			 ADDRESSOF on stack.  */
3234		      end_sequence ();
3235		      goto give_up;
3236		    }
3237		  seq = gen_sequence ();
3238		  end_sequence ();
3239		  emit_insn_before (seq, insn);
3240		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3241					 insn, ht);
3242
3243		  start_sequence ();
3244		  store_bit_field (sub, size_x, 0, GET_MODE (x),
3245				   val, GET_MODE_SIZE (GET_MODE (sub)),
3246				   GET_MODE_SIZE (GET_MODE (sub)));
3247
3248		  /* Make sure to unshare any shared rtl that store_bit_field
3249		     might have created.  */
3250		  unshare_all_rtl_again (get_insns ());
3251
3252		  seq = gen_sequence ();
3253		  end_sequence ();
3254		  p = emit_insn_after (seq, insn);
3255		  if (NEXT_INSN (insn))
3256		    compute_insns_for_mem (NEXT_INSN (insn),
3257					   p ? NEXT_INSN (p) : NULL_RTX,
3258					   ht);
3259		}
3260	      else
3261		{
3262		  rtx p = PREV_INSN (insn);
3263
3264		  start_sequence ();
3265		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3266					   GET_MODE (x), GET_MODE (x),
3267					   GET_MODE_SIZE (GET_MODE (sub)),
3268					   GET_MODE_SIZE (GET_MODE (sub)));
3269
3270		  if (! validate_change (insn, loc, val, 0))
3271		    {
3272		      /* Discard the current sequence and put the
3273			 ADDRESSOF on stack.  */
3274		      end_sequence ();
3275		      goto give_up;
3276		    }
3277
3278		  seq = gen_sequence ();
3279		  end_sequence ();
3280		  emit_insn_before (seq, insn);
3281		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3282					 insn, ht);
3283		}
3284
3285	      /* Remember the replacement so that the same one can be done
3286		 on the REG_NOTES.  */
3287	      purge_bitfield_addressof_replacements
3288		= gen_rtx_EXPR_LIST (VOIDmode, x,
3289				     gen_rtx_EXPR_LIST
3290				     (VOIDmode, val,
3291				      purge_bitfield_addressof_replacements));
3292
3293	      /* We replaced with a reg -- all done.  */
3294	      return true;
3295	    }
3296	}
3297      else if (validate_change (insn, loc, sub, 0))
3298	{
3299	  /* Remember the replacement so that the same one can be done
3300	     on the REG_NOTES.  */
3301	  if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3302	    {
3303	      rtx tem;
3304
3305	      for (tem = purge_addressof_replacements;
3306		   tem != NULL_RTX;
3307		   tem = XEXP (XEXP (tem, 1), 1))
3308		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3309		  {
3310		    XEXP (XEXP (tem, 1), 0) = sub;
3311		    return true;
3312		  }
	      purge_addressof_replacements
		= gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
				     gen_rtx_EXPR_LIST (VOIDmode, sub,
							purge_addressof_replacements));
3317	      return true;
3318	    }
3319	  goto restart;
3320	}
3321    give_up:;
      /* Else give up and put it into the stack.  */
3323    }
3324  else if (code == ADDRESSOF)
3325    {
3326      put_addressof_into_stack (x, ht);
3327      return true;
3328    }
3329  else if (code == SET)
3330    {
3331      result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3332      result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3333      return result;
3334    }
3335
  /* Scan all subexpressions.  */
3337  fmt = GET_RTX_FORMAT (code);
3338  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3339    {
3340      if (*fmt == 'e')
3341	result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3342      else if (*fmt == 'E')
3343	for (j = 0; j < XVECLEN (x, i); j++)
3344	  result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3345    }
3346
3347  return result;
3348}
3349
/* Return the existing hash table entry HE if it is non-null; otherwise
   allocate and return a new entry in HT.  */
3351
3352static struct hash_entry *
3353insns_for_mem_newfunc (he, ht, k)
3354     struct hash_entry *he;
3355     struct hash_table *ht;
3356     hash_table_key k ATTRIBUTE_UNUSED;
3357{
3358  struct insns_for_mem_entry *ifmhe;
3359  if (he)
3360    return he;
3361
3362  ifmhe = ((struct insns_for_mem_entry *)
3363	   hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3364  ifmhe->insns = NULL_RTX;
3365
3366  return &ifmhe->he;
3367}
3368
3369/* Return a hash value for K, a REG.  */
3370
3371static unsigned long
3372insns_for_mem_hash (k)
3373     hash_table_key k;
3374{
  /* K is really an RTX.  Just use the address as the hash value.  */
3376  return (unsigned long) k;
3377}
3378
3379/* Return non-zero if K1 and K2 (two REGs) are the same.  */
3380
3381static boolean
3382insns_for_mem_comp (k1, k2)
3383     hash_table_key k1;
3384     hash_table_key k2;
3385{
3386  return k1 == k2;
3387}
3388
3389struct insns_for_mem_walk_info {
3390  /* The hash table that we are using to record which INSNs use which
3391     MEMs.  */
3392  struct hash_table *ht;
3393
  /* The INSN we are currently processing.  */
3395  rtx insn;
3396
3397  /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3398     to find the insns that use the REGs in the ADDRESSOFs.  */
3399  int pass;
3400};
3401
3402/* Called from compute_insns_for_mem via for_each_rtx.  If R is a REG
3403   that might be used in an ADDRESSOF expression, record this INSN in
3404   the hash table given by DATA (which is really a pointer to an
3405   insns_for_mem_walk_info structure).  */
3406
3407static int
3408insns_for_mem_walk (r, data)
3409     rtx *r;
3410     void *data;
3411{
3412  struct insns_for_mem_walk_info *ifmwi
3413    = (struct insns_for_mem_walk_info *) data;
3414
3415  if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3416      && GET_CODE (XEXP (*r, 0)) == REG)
3417    hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3418  else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3419    {
      /* Look up this REG in the hash table.  It will be present only if
	 pass 0 saw it inside an ADDRESSOF; we do not create entries here.  */
3421      struct insns_for_mem_entry *ifme
3422	= (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3423						      *r,
3424						      /*create=*/0,
3425						      /*copy=*/0);
3426
      /* If we have not already recorded this INSN, do so now.  Since
	 we process the INSNs in order, we know that if we have
	 recorded it, it must be at the front of the list.  */
3430      if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3431	{
3432	  /* We do the allocation on the same obstack as is used for
3433	     the hash table since this memory will not be used once
3434	     the hash table is deallocated.  */
3435	  push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3436	  ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3437					   ifme->insns);
3438	  pop_obstacks ();
3439	}
3440    }
3441
3442  return 0;
3443}
3444
3445/* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3446   which REGs in HT.  */
3447
3448static void
3449compute_insns_for_mem (insns, last_insn, ht)
3450     rtx insns;
3451     rtx last_insn;
3452     struct hash_table *ht;
3453{
3454  rtx insn;
3455  struct insns_for_mem_walk_info ifmwi;
3456  ifmwi.ht = ht;
3457
3458  for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3459    for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3460      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3461	{
3462	  ifmwi.insn = insn;
3463	  for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3464	}
3465}
3466
3467/* Helper function for purge_addressof called through for_each_rtx.
   Returns true iff the rtl is an ADDRESSOF.  */

3469static int
3470is_addressof (rtl, data)
3471     rtx * rtl;
3472     void * data ATTRIBUTE_UNUSED;
3473{
3474  return GET_CODE (* rtl) == ADDRESSOF;
3475}
3476
3477/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
3478   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3479   stack.  */
3480
3481void
3482purge_addressof (insns)
3483     rtx insns;
3484{
3485  rtx insn;
3486  struct hash_table ht;
3487
3488  /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
3489     requires a fixup pass over the instruction stream to correct
3490     INSNs that depended on the REG being a REG, and not a MEM.  But,
     these fixup passes are slow.  Furthermore, most MEMs are not
3492     mentioned in very many instructions.  So, we speed up the process
3493     by pre-calculating which REGs occur in which INSNs; that allows
3494     us to perform the fixup passes much more quickly.  */
3495  hash_table_init (&ht,
3496		   insns_for_mem_newfunc,
3497		   insns_for_mem_hash,
3498		   insns_for_mem_comp);
3499  compute_insns_for_mem (insns, NULL_RTX, &ht);
3500
3501  for (insn = insns; insn; insn = NEXT_INSN (insn))
3502    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3503	|| GET_CODE (insn) == CALL_INSN)
3504      {
3505	if (! purge_addressof_1 (&PATTERN (insn), insn,
3506				 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3507	  /* If we could not replace the ADDRESSOFs in the insn,
3508	     something is wrong.  */
3509	  abort ();
3510
3511	if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3512	  {
3513	    /* If we could not replace the ADDRESSOFs in the insn's notes,
3514	       we can just remove the offending notes instead.  */
3515	    rtx note;
3516
3517	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3518	      {
3519		/* If we find a REG_RETVAL note then the insn is a libcall.
3520		   Such insns must have REG_EQUAL notes as well, in order
3521		   for later passes of the compiler to work.  So it is not
3522		   safe to delete the notes here, and instead we abort.  */
3523		if (REG_NOTE_KIND (note) == REG_RETVAL)
3524		  abort ();
3525		if (for_each_rtx (& note, is_addressof, NULL))
3526		  remove_note (insn, note);
3527	      }
3528	  }
3529      }
3530
3531  /* Clean up.  */
3532  hash_table_free (&ht);
3533  purge_bitfield_addressof_replacements = 0;
3534  purge_addressof_replacements = 0;
3535
3536  /* REGs are shared.  purge_addressof will destructively replace a REG
3537     with a MEM, which creates shared MEMs.
3538
3539     Unfortunately, the children of put_reg_into_stack assume that MEMs
3540     referring to the same stack slot are shared (fixup_var_refs and
3541     the associated hash table code).
3542
3543     So, we have to do another unsharing pass after we have flushed any
3544     REGs that had their address taken into the stack.
3545
3546     It may be worth tracking whether or not we converted any REGs into
3547     MEMs to avoid this overhead when it is not needed.  */
3548  unshare_all_rtl_again (get_insns ());
3549}
3550
3551/* Pass through the INSNS of function FNDECL and convert virtual register
3552   references to hard register references.  */
3553
3554void
3555instantiate_virtual_regs (fndecl, insns)
3556     tree fndecl;
3557     rtx insns;
3558{
3559  rtx insn;
3560  int i;
3561
3562  /* Compute the offsets to use for this function.  */
3563  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3564  var_offset = STARTING_FRAME_OFFSET;
3565  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3566  out_arg_offset = STACK_POINTER_OFFSET;
3567  cfa_offset = ARG_POINTER_CFA_OFFSET;
3568
3569  /* Scan all variables and parameters of this function.  For each that is
3570     in memory, instantiate all virtual registers if the result is a valid
3571     address.  If not, we do it later.  That will handle most uses of virtual
3572     regs on many machines.  */
3573  instantiate_decls (fndecl, 1);
3574
3575  /* Initialize recognition, indicating that volatile is OK.  */
3576  init_recog ();
3577
3578  /* Scan through all the insns, instantiating every virtual register still
3579     present.  */
3580  for (insn = insns; insn; insn = NEXT_INSN (insn))
3581    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3582	|| GET_CODE (insn) == CALL_INSN)
3583      {
3584	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3585	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3586      }
3587
3588  /* Instantiate the stack slots for the parm registers, for later use in
3589     addressof elimination.  */
3590  for (i = 0; i < max_parm_reg; ++i)
3591    if (parm_reg_stack_loc[i])
3592      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3593
3594  /* Now instantiate the remaining register equivalences for debugging info.
3595     These will not be valid addresses.  */
3596  instantiate_decls (fndecl, 0);
3597
3598  /* Indicate that, from now on, assign_stack_local should use
3599     frame_pointer_rtx.  */
3600  virtuals_instantiated = 1;
3601}
3602
3603/* Scan all decls in FNDECL (both variables and parameters) and instantiate
3604   all virtual registers in their DECL_RTL's.
3605
3606   If VALID_ONLY, do this only if the resulting address is still valid.
3607   Otherwise, always do it.  */
3608
3609static void
3610instantiate_decls (fndecl, valid_only)
3611     tree fndecl;
3612     int valid_only;
3613{
3614  tree decl;
3615
3616  if (DECL_SAVED_INSNS (fndecl))
3617    /* When compiling an inline function, the obstack used for
3618       rtl allocation is the maybepermanent_obstack.  Calling
3619       `resume_temporary_allocation' switches us back to that
3620       obstack while we process this function's parameters.  */
3621    resume_temporary_allocation ();
3622
3623  /* Process all parameters of the function.  */
3624  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3625    {
3626      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3627
3628      instantiate_decl (DECL_RTL (decl), size, valid_only);
3629
3630      /* If the parameter was promoted, then the incoming RTL mode may be
3631	 larger than the declared type size.  We must use the larger of
3632	 the two sizes.  */
3633      size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3634      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3635    }
3636
3637  /* Now process all variables defined in the function or its subblocks.  */
3638  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3639
3640  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3641    {
3642      /* Save all rtl allocated for this function by raising the
3643	 high-water mark on the maybepermanent_obstack.  */
3644      preserve_data ();
3645      /* All further rtl allocation is now done in the current_obstack.  */
3646      rtl_in_current_obstack ();
3647    }
3648}
3649
3650/* Subroutine of instantiate_decls: Process all decls in the given
3651   BLOCK node and all its subblocks.  */
3652
3653static void
3654instantiate_decls_1 (let, valid_only)
3655     tree let;
3656     int valid_only;
3657{
3658  tree t;
3659
3660  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3661    instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3662		      valid_only);
3663
3664  /* Process all subblocks.  */
3665  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3666    instantiate_decls_1 (t, valid_only);
3667}
3668
3669/* Subroutine of the preceding procedures: Given RTL representing a
3670   decl and the size of the object, do any instantiation required.
3671
3672   If VALID_ONLY is non-zero, it means that the RTL should only be
3673   changed if the new address is valid.  */
3674
3675static void
3676instantiate_decl (x, size, valid_only)
3677     rtx x;
3678     int size;
3679     int valid_only;
3680{
3681  enum machine_mode mode;
3682  rtx addr;
3683
3684  /* If this is not a MEM, no need to do anything.  Similarly if the
3685     address is a constant or a register that is not a virtual register.  */
3686
3687  if (x == 0 || GET_CODE (x) != MEM)
3688    return;
3689
3690  addr = XEXP (x, 0);
3691  if (CONSTANT_P (addr)
3692      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3693      || (GET_CODE (addr) == REG
3694	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3695	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3696    return;
3697
3698  /* If we should only do this if the address is valid, copy the address.
3699     We need to do this so we can undo any changes that might make the
3700     address invalid.  This copy is unfortunate, but probably can't be
3701     avoided.  */
3702
3703  if (valid_only)
3704    addr = copy_rtx (addr);
3705
3706  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3707
3708  if (valid_only)
3709    {
3710      /* Now verify that the resulting address is valid for every integer or
3711	 floating-point mode up to and including SIZE bytes long.  We do this
3712	 since the object might be accessed in any mode and frame addresses
3713	 are shared.  */
3714
3715      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3716	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3717	   mode = GET_MODE_WIDER_MODE (mode))
3718	if (! memory_address_p (mode, addr))
3719	  return;
3720
3721      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3722	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3723	   mode = GET_MODE_WIDER_MODE (mode))
3724	if (! memory_address_p (mode, addr))
3725	  return;
3726    }
3727
3728  /* Put back the address now that we have updated it and we either know
3729     it is valid or we don't care whether it is valid.  */
3730
3731  XEXP (x, 0) = addr;
3732}
3733
3734
/* Called when instantiate_virtual_regs has failed to update the instruction.
   Usually this means that a non-matching instruction has been emitted; for
   asm statements, however, the problem may lie in the constraints.  */
/* [zooey]: backported this (and calls to it) from gcc-3.4 in order to
   give proper error messages for invalid asm instead of ICEing.  */
3740static void
3741instantiate_virtual_regs_lossage (rtx insn)
3742{
3743  if (asm_noperands (PATTERN (insn)) >= 0)
3744    {
3745      error_for_asm (insn, "impossible constraint in `asm'");
3746      delete_insn (insn);
3747    }
3748  else
3749    abort ();
3750}
3751/* Given a pointer to a piece of rtx and an optional pointer to the
3752   containing object, instantiate any virtual registers present in it.
3753
   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if the
   replacement is not valid.
3757
3758   Return 1 if we either had nothing to do or if we were able to do the
3759   needed replacement.  Return 0 otherwise; we only return zero if
3760   EXTRA_INSNS is zero.
3761
3762   We first try some simple transformations to avoid the creation of extra
3763   pseudos.  */
3764
3765static int
3766instantiate_virtual_regs_1 (loc, object, extra_insns)
3767     rtx *loc;
3768     rtx object;
3769     int extra_insns;
3770{
3771  rtx x;
3772  RTX_CODE code;
3773  rtx new = 0;
3774  HOST_WIDE_INT offset = 0;
3775  rtx temp;
3776  rtx seq;
3777  int i, j;
3778  char *fmt;
3779
3780  /* Re-start here to avoid recursion in common cases.  */
3781 restart:
3782
3783  x = *loc;
3784  if (x == 0)
3785    return 1;
3786
3787  code = GET_CODE (x);
3788
3789  /* Check for some special cases.  */
3790  switch (code)
3791    {
3792    case CONST_INT:
3793    case CONST_DOUBLE:
3794    case CONST:
3795    case SYMBOL_REF:
3796    case CODE_LABEL:
3797    case PC:
3798    case CC0:
3799    case ASM_INPUT:
3800    case ADDR_VEC:
3801    case ADDR_DIFF_VEC:
3802    case RETURN:
3803      return 1;
3804
3805    case SET:
3806      /* We are allowed to set the virtual registers.  This means that
3807	 the actual register should receive the source minus the
3808	 appropriate offset.  This is used, for example, in the handling
3809	 of non-local gotos.  */
3810      if (SET_DEST (x) == virtual_incoming_args_rtx)
3811	new = arg_pointer_rtx, offset = - in_arg_offset;
3812      else if (SET_DEST (x) == virtual_stack_vars_rtx)
3813	new = frame_pointer_rtx, offset = - var_offset;
3814      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3815	new = stack_pointer_rtx, offset = - dynamic_offset;
3816      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3817	new = stack_pointer_rtx, offset = - out_arg_offset;
3818      else if (SET_DEST (x) == virtual_cfa_rtx)
3819	new = arg_pointer_rtx, offset = - cfa_offset;
3820
3821      if (new)
3822	{
3823	  /* The only valid sources here are PLUS or REG.  Just do
3824	     the simplest possible thing to handle them.  */
3825	  if (GET_CODE (SET_SRC (x)) != REG
3826	      && GET_CODE (SET_SRC (x)) != PLUS)
3827	    {
3828	      instantiate_virtual_regs_lossage (object);
3829	      return 1;
3830	    }
3831
3832	  start_sequence ();
3833	  if (GET_CODE (SET_SRC (x)) != REG)
3834	    temp = force_operand (SET_SRC (x), NULL_RTX);
3835	  else
3836	    temp = SET_SRC (x);
3837	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3838	  seq = get_insns ();
3839	  end_sequence ();
3840
3841	  emit_insns_before (seq, object);
3842	  SET_DEST (x) = new;
3843
3844	  if (! validate_change (object, &SET_SRC (x), temp, 0)
3845	      || ! extra_insns)
3846	    instantiate_virtual_regs_lossage (object);
3847
3848	  return 1;
3849	}
3850
3851      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3852      loc = &SET_SRC (x);
3853      goto restart;
3854
3855    case PLUS:
3856      /* Handle special case of virtual register plus constant.  */
3857      if (CONSTANT_P (XEXP (x, 1)))
3858	{
3859	  rtx old, new_offset;
3860
3861	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
3862	  if (GET_CODE (XEXP (x, 0)) == PLUS)
3863	    {
3864	      rtx inner = XEXP (XEXP (x, 0), 0);
3865
3866	      if (inner == virtual_incoming_args_rtx)
3867		new = arg_pointer_rtx, offset = in_arg_offset;
3868	      else if (inner == virtual_stack_vars_rtx)
3869		new = frame_pointer_rtx, offset = var_offset;
3870	      else if (inner == virtual_stack_dynamic_rtx)
3871		new = stack_pointer_rtx, offset = dynamic_offset;
3872	      else if (inner == virtual_outgoing_args_rtx)
3873		new = stack_pointer_rtx, offset = out_arg_offset;
3874	      else if (inner == virtual_cfa_rtx)
3875	        new = arg_pointer_rtx, offset = cfa_offset;
3876	      else
3877		{
3878		  loc = &XEXP (x, 0);
3879		  goto restart;
3880		}
3881
3882	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3883					  extra_insns);
3884	      new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3885	    }
3886
3887	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3888	    new = arg_pointer_rtx, offset = in_arg_offset;
3889	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3890	    new = frame_pointer_rtx, offset = var_offset;
3891	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3892	    new = stack_pointer_rtx, offset = dynamic_offset;
3893	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3894	    new = stack_pointer_rtx, offset = out_arg_offset;
3895          else if (XEXP (x, 0) == virtual_cfa_rtx)
3896            new = arg_pointer_rtx, offset = cfa_offset;
3897	  else
3898	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has already been checked),
		 it needs to be checked.  */
3902	      if (GET_CODE (XEXP (x, 0)) != REG)
3903		{
3904		  loc = &XEXP (x, 0);
3905		  goto restart;
3906		}
3907	      return 1;
3908	    }
3909
3910	  new_offset = plus_constant (XEXP (x, 1), offset);
3911
3912	  /* If the new constant is zero, try to replace the sum with just
3913	     the register.  */
3914	  if (new_offset == const0_rtx
3915	      && validate_change (object, loc, new, 0))
3916	    return 1;
3917
3918	  /* Next try to replace the register and new offset.
3919	     There are two changes to validate here and we can't assume that
3920	     in the case of old offset equals new just changing the register
3921	     will yield a valid insn.  In the interests of a little efficiency,
3922	     however, we only call validate change once (we don't queue up the
3923	     changes and then call apply_change_group).  */
3924
3925	  old = XEXP (x, 0);
3926	  if (offset == 0
3927	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
3928	      : (XEXP (x, 0) = new,
3929		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3930	    {
3931	      if (! extra_insns)
3932		{
3933		  XEXP (x, 0) = old;
3934		  return 0;
3935		}
3936
	      /* Otherwise copy the new constant into a register and replace
		 the constant with that register.  */
3939	      temp = gen_reg_rtx (Pmode);
3940	      XEXP (x, 0) = new;
3941	      if (validate_change (object, &XEXP (x, 1), temp, 0))
3942		emit_insn_before (gen_move_insn (temp, new_offset), object);
3943	      else
3944		{
3945		  /* If that didn't work, replace this expression with a
3946		     register containing the sum.  */
3947
3948		  XEXP (x, 0) = old;
3949		  new = gen_rtx_PLUS (Pmode, new, new_offset);
3950
3951		  start_sequence ();
3952		  temp = force_operand (new, NULL_RTX);
3953		  seq = get_insns ();
3954		  end_sequence ();
3955
3956		  emit_insns_before (seq, object);
3957		  if (! validate_change (object, loc, temp, 0)
3958		      && ! validate_replace_rtx (x, temp, object))
3959		    {
3960		      instantiate_virtual_regs_lossage (object);
3961		      return 1;
3962		    }
3963		}
3964	    }
3965
3966	  return 1;
3967	}
3968
3969      /* Fall through to generic two-operand expression case.  */
3970    case EXPR_LIST:
3971    case CALL:
3972    case COMPARE:
3973    case MINUS:
3974    case MULT:
3975    case DIV:      case UDIV:
3976    case MOD:      case UMOD:
3977    case AND:      case IOR:      case XOR:
3978    case ROTATERT: case ROTATE:
3979    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3980    case NE:       case EQ:
3981    case GE:       case GT:       case GEU:    case GTU:
3982    case LE:       case LT:       case LEU:    case LTU:
3983      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3984	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3985      loc = &XEXP (x, 0);
3986      goto restart;
3987
3988    case MEM:
3989      /* Most cases of MEM that convert to valid addresses have already been
3990	 handled by our scan of decls.  The only special handling we
3991	 need here is to make a copy of the rtx to ensure it isn't being
3992	 shared if we have to change it to a pseudo.
3993
3994	 If the rtx is a simple reference to an address via a virtual register,
3995	 it can potentially be shared.  In such cases, first try to make it
3996	 a valid address, which can also be shared.  Otherwise, copy it and
3997	 proceed normally.
3998
3999	 First check for common cases that need no processing.  These are
4000	 usually due to instantiation already being done on a previous instance
4001	 of a shared rtx.  */
4002
4003      temp = XEXP (x, 0);
4004      if (CONSTANT_ADDRESS_P (temp)
4005#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4006	  || temp == arg_pointer_rtx
4007#endif
4008#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4009	  || temp == hard_frame_pointer_rtx
4010#endif
4011	  || temp == frame_pointer_rtx)
4012	return 1;
4013
4014      if (GET_CODE (temp) == PLUS
4015	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4016	  && (XEXP (temp, 0) == frame_pointer_rtx
4017#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4018	      || XEXP (temp, 0) == hard_frame_pointer_rtx
4019#endif
4020#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4021	      || XEXP (temp, 0) == arg_pointer_rtx
4022#endif
4023	      ))
4024	return 1;
4025
4026      if (temp == virtual_stack_vars_rtx
4027	  || temp == virtual_incoming_args_rtx
4028	  || (GET_CODE (temp) == PLUS
4029	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4030	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
4031		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4032	{
4033	  /* This MEM may be shared.  If the substitution can be done without
4034	     the need to generate new pseudos, we want to do it in place
4035	     so all copies of the shared rtx benefit.  The call below will
4036	     only make substitutions if the resulting address is still
4037	     valid.
4038
4039	     Note that we cannot pass X as the object in the recursive call
4040	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
4042	     only modify X without copying it if X will have a valid
4043	     address.
4044
	     ??? Also note that this can still lose if OBJECT is an insn that
	     has fewer restrictions on an address than some other insn.
4047	     In that case, we will modify the shared address.  This case
4048	     doesn't seem very likely, though.  One case where this could
4049	     happen is in the case of a USE or CLOBBER reference, but we
4050	     take care of that below.  */
4051
4052	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4053					  object ? object : x, 0))
4054	    return 1;
4055
4056	  /* Otherwise make a copy and process that copy.  We copy the entire
4057	     RTL expression since it might be a PLUS which could also be
4058	     shared.  */
4059	  *loc = x = copy_rtx (x);
4060	}
4061
4062      /* Fall through to generic unary operation case.  */
4063    case SUBREG:
4064    case STRICT_LOW_PART:
4065    case NEG:          case NOT:
4066    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
4067    case SIGN_EXTEND:  case ZERO_EXTEND:
4068    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4069    case FLOAT:        case FIX:
4070    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4071    case ABS:
4072    case SQRT:
4073    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
4076      loc = &XEXP (x, 0);
4077      goto restart;
4078
4079    case USE:
4080    case CLOBBER:
4081      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
4082	 go ahead and make the invalid one, but do it to a copy.  For a REG,
	 just make the recursive call, since there's no chance of a problem.  */
4084
4085      if ((GET_CODE (XEXP (x, 0)) == MEM
4086	   && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4087					  0))
4088	  || (GET_CODE (XEXP (x, 0)) == REG
4089	      && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4090	return 1;
4091
4092      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4093      loc = &XEXP (x, 0);
4094      goto restart;
4095
4096    case REG:
4097      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
4098	 in front of this insn and substitute the temporary.  */
4099      if (x == virtual_incoming_args_rtx)
4100	new = arg_pointer_rtx, offset = in_arg_offset;
4101      else if (x == virtual_stack_vars_rtx)
4102	new = frame_pointer_rtx, offset = var_offset;
4103      else if (x == virtual_stack_dynamic_rtx)
4104	new = stack_pointer_rtx, offset = dynamic_offset;
4105      else if (x == virtual_outgoing_args_rtx)
4106	new = stack_pointer_rtx, offset = out_arg_offset;
4107      else if (x == virtual_cfa_rtx)
4108        new = arg_pointer_rtx, offset = cfa_offset;
4109
4110      if (new)
4111	{
4112	  temp = plus_constant (new, offset);
4113	  if (!validate_change (object, loc, temp, 0))
4114	    {
4115	      if (! extra_insns)
4116		return 0;
4117
4118	      start_sequence ();
4119	      temp = force_operand (temp, NULL_RTX);
4120	      seq = get_insns ();
4121	      end_sequence ();
4122
4123	      emit_insns_before (seq, object);
4124	      if (! validate_change (object, loc, temp, 0)
4125		  && ! validate_replace_rtx (x, temp, object))
4126	        instantiate_virtual_regs_lossage (object);
4127	    }
4128	}
4129
4130      return 1;
4131
4132    case ADDRESSOF:
4133      if (GET_CODE (XEXP (x, 0)) == REG)
4134	return 1;
4135
4136      else if (GET_CODE (XEXP (x, 0)) == MEM)
4137	{
4138	  /* If we have a (addressof (mem ..)), do any instantiation inside
4139	     since we know we'll be making the inside valid when we finally
4140	     remove the ADDRESSOF.  */
4141	  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4142	  return 1;
4143	}
4144      break;
4145
4146    default:
4147      break;
4148    }
4149
4150  /* Scan all subexpressions.  */
4151  fmt = GET_RTX_FORMAT (code);
4152  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4153    if (*fmt == 'e')
4154      {
4155	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4156	  return 0;
4157      }
4158    else if (*fmt == 'E')
4159      for (j = 0; j < XVECLEN (x, i); j++)
4160	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4161					  extra_insns))
4162	  return 0;
4163
4164  return 1;
4165}
4166
4167/* Optimization: assuming this function does not receive nonlocal gotos,
4168   delete the handlers for such, as well as the insns to establish
4169   and disestablish them.  */
4170
4171static void
4172delete_handlers ()
4173{
4174  rtx insn;
4175  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4176    {
4177      /* Delete the handler by turning off the flag that would
4178	 prevent jump_optimize from deleting it.
4179	 Also permit deletion of the nonlocal labels themselves
4180	 if nothing local refers to them.  */
4181      if (GET_CODE (insn) == CODE_LABEL)
4182	{
4183	  tree t, last_t;
4184
4185	  LABEL_PRESERVE_P (insn) = 0;
4186
4187	  /* Remove it from the nonlocal_label list, to avoid confusing
4188	     flow.  */
4189	  for (t = nonlocal_labels, last_t = 0; t;
4190	       last_t = t, t = TREE_CHAIN (t))
4191	    if (DECL_RTL (TREE_VALUE (t)) == insn)
4192	      break;
4193	  if (t)
4194	    {
4195	      if (! last_t)
4196		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4197	      else
4198		TREE_CHAIN (last_t) = TREE_CHAIN (t);
4199	    }
4200	}
4201      if (GET_CODE (insn) == INSN)
4202	{
4203	  int can_delete = 0;
4204	  rtx t;
4205	  for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4206	    if (reg_mentioned_p (t, PATTERN (insn)))
4207	      {
4208		can_delete = 1;
4209		break;
4210	      }
4211	  if (can_delete
4212	      || (nonlocal_goto_stack_level != 0
4213		  && reg_mentioned_p (nonlocal_goto_stack_level,
4214				      PATTERN (insn))))
4215	    delete_insn (insn);
4216	}
4217    }
4218}
4219
4220/* Output a USE for any register use in RTL.
4221   This is used with -noreg to mark the extent of lifespan
4222   of any registers used in a user-visible variable's DECL_RTL.  */
4223
4224void
4225use_variable (rtl)
4226     rtx rtl;
4227{
4228  if (GET_CODE (rtl) == REG)
4229    /* This is a register variable.  */
4230    emit_insn (gen_rtx_USE (VOIDmode, rtl));
4231  else if (GET_CODE (rtl) == MEM
4232	   && GET_CODE (XEXP (rtl, 0)) == REG
4233	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4234	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4235	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4236    /* This is a variable-sized structure.  */
4237    emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
4238}
4239
4240/* Like use_variable except that it outputs the USEs after INSN
4241   instead of at the end of the insn-chain.  */
4242
4243void
4244use_variable_after (rtl, insn)
4245     rtx rtl, insn;
4246{
4247  if (GET_CODE (rtl) == REG)
4248    /* This is a register variable.  */
4249    emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
4250  else if (GET_CODE (rtl) == MEM
4251	   && GET_CODE (XEXP (rtl, 0)) == REG
4252	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
4253	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
4254	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
4255    /* This is a variable-sized structure.  */
4256    emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
4257}
4258
4259int
4260max_parm_reg_num ()
4261{
4262  return max_parm_reg;
4263}
4264
4265/* Return the first insn following those generated by `assign_parms'.  */
4266
4267rtx
4268get_first_nonparm_insn ()
4269{
4270  if (last_parm_insn)
4271    return NEXT_INSN (last_parm_insn);
4272  return get_insns ();
4273}
4274
4275/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4276   Crash if there is none.  */
4277
4278rtx
4279get_first_block_beg ()
4280{
4281  register rtx searcher;
4282  register rtx insn = get_first_nonparm_insn ();
4283
4284  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4285    if (GET_CODE (searcher) == NOTE
4286	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4287      return searcher;
4288
4289  abort ();	/* Invalid call to this function.  (See comments above.)  */
4290  return NULL_RTX;
4291}
4292
4293/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4294   This means a type for which function calls must pass an address to the
4295   function or get an address back from the function.
4296   EXP may be a type node or an expression (whose type is tested).  */
4297
4298int
4299aggregate_value_p (exp)
4300     tree exp;
4301{
4302  int i, regno, nregs;
4303  rtx reg;
4304  tree type;
4305  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
4306    type = exp;
4307  else
4308    type = TREE_TYPE (exp);
4309
4310  if (RETURN_IN_MEMORY (type))
4311    return 1;
4312  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4313     and thus can't be returned in registers.  */
4314  if (TREE_ADDRESSABLE (type))
4315    return 1;
4316  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4317    return 1;
4318  /* Make sure we have suitable call-clobbered regs to return
4319     the value in; if not, we must return it in memory.  */
4320  reg = hard_function_value (type, 0);
4321
4322  /* If we have something other than a REG (e.g. a PARALLEL), then assume
4323     it is OK.  */
4324  if (GET_CODE (reg) != REG)
4325    return 0;
4326
4327  regno = REGNO (reg);
4328  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4329  for (i = 0; i < nregs; i++)
4330    if (! call_used_regs[regno + i])
4331      return 1;
4332  return 0;
4333}
4334
4335/* Assign RTL expressions to the function's parameters.
4336   This may involve copying them into registers and using
4337   those registers as the RTL for them.
4338
4339   If SECOND_TIME is non-zero it means that this function is being
4340   called a second time.  This is done by integrate.c when a function's
4341   compilation is deferred.  We need to come back here in case the
4342   FUNCTION_ARG macro computes items needed for the rest of the compilation
4343   (such as changing which registers are fixed or caller-saved).  But suppress
4344   writing any insns or setting DECL_RTL of anything in this case.  */
4345
4346void
4347assign_parms (fndecl, second_time)
4348     tree fndecl;
4349     int second_time;
4350{
4351  register tree parm;
4352  register rtx entry_parm = 0;
4353  register rtx stack_parm = 0;
4354  CUMULATIVE_ARGS args_so_far;
4355  enum machine_mode promoted_mode, passed_mode;
4356  enum machine_mode nominal_mode, promoted_nominal_mode;
4357  int unsignedp;
4358  /* Total space needed so far for args on the stack,
4359     given as a constant and a tree-expression.  */
4360  struct args_size stack_args_size;
4361  tree fntype = TREE_TYPE (fndecl);
4362  tree fnargs = DECL_ARGUMENTS (fndecl);
4363  /* This is used for the arg pointer when referring to stack args.  */
4364  rtx internal_arg_pointer;
  /* This is a dummy PARM_DECL that we use for the function result if
     the function returns a structure.  */
4367  tree function_result_decl = 0;
4368#ifdef SETUP_INCOMING_VARARGS
4369  int varargs_setup = 0;
4370#endif
4371  rtx conversion_insns = 0;
4372
4373  /* Nonzero if the last arg is named `__builtin_va_alist',
4374     which is used on some machines for old-fashioned non-ANSI varargs.h;
4375     this should be stuck onto the stack as if it had arrived there.  */
4376  int hide_last_arg
4377    = (current_function_varargs
4378       && fnargs
4379       && (parm = tree_last (fnargs)) != 0
4380       && DECL_NAME (parm)
4381       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4382		     "__builtin_va_alist")));
4383
4384  /* Nonzero if function takes extra anonymous args.
4385     This means the last named arg must be on the stack
4386     right before the anonymous ones.  */
4387  int stdarg
4388    = (TYPE_ARG_TYPES (fntype) != 0
4389       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4390	   != void_type_node));
4391
4392  current_function_stdarg = stdarg;
4393
4394  /* If the reg that the virtual arg pointer will be translated into is
4395     not a fixed reg or is the stack pointer, make a copy of the virtual
4396     arg pointer, and address parms via the copy.  The frame pointer is
4397     considered fixed even though it is not marked as such.
4398
4399     The second time through, simply use ap to avoid generating rtx.  */
4400
4401  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4402       || ! (fixed_regs[ARG_POINTER_REGNUM]
4403	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
4404      && ! second_time)
4405    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4406  else
4407    internal_arg_pointer = virtual_incoming_args_rtx;
4408  current_function_internal_arg_pointer = internal_arg_pointer;
4409
4410  stack_args_size.constant = 0;
4411  stack_args_size.var = 0;
4412
4413  /* If struct value address is treated as the first argument, make it so.  */
4414  if (aggregate_value_p (DECL_RESULT (fndecl))
4415      && ! current_function_returns_pcc_struct
4416      && struct_value_incoming_rtx == 0)
4417    {
4418      tree type = build_pointer_type (TREE_TYPE (fntype));
4419
4420      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4421
4422      DECL_ARG_TYPE (function_result_decl) = type;
4423      TREE_CHAIN (function_result_decl) = fnargs;
4424      fnargs = function_result_decl;
4425    }
4426
4427  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4428  parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4429  bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
4430
4431#ifdef INIT_CUMULATIVE_INCOMING_ARGS
4432  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4433#else
4434  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4435#endif
4436
4437  /* We haven't yet found an argument that we must push and pretend the
4438     caller did.  */
4439  current_function_pretend_args_size = 0;
4440
4441  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4442    {
4443      int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4444      struct args_size stack_offset;
4445      struct args_size arg_size;
4446      int passed_pointer = 0;
4447      int did_conversion = 0;
4448      tree passed_type = DECL_ARG_TYPE (parm);
4449      tree nominal_type = TREE_TYPE (parm);
4450      int pretend_named;
4451
      /* Set LAST_NAMED if this is the last named arg before some
	 anonymous args.  */
4454      int last_named = ((TREE_CHAIN (parm) == 0
4455			 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4456			&& (stdarg || current_function_varargs));
4457      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
4458	 most machines, if this is a varargs/stdarg function, then we treat
4459	 the last named arg as if it were anonymous too.  */
4460      int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4461
4462      if (TREE_TYPE (parm) == error_mark_node
4463	  /* This can happen after weird syntax errors
4464	     or if an enum type is defined among the parms.  */
4465	  || TREE_CODE (parm) != PARM_DECL
4466	  || passed_type == NULL)
4467	{
4468	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4469	    = gen_rtx_MEM (BLKmode, const0_rtx);
4470	  TREE_USED (parm) = 1;
4471	  continue;
4472	}
4473
4474      /* For varargs.h function, save info about regs and stack space
4475	 used by the individual args, not including the va_alist arg.  */
4476      if (hide_last_arg && last_named)
4477	current_function_args_info = args_so_far;
4478
4479      /* Find mode of arg as it is passed, and mode of arg
4480	 as it should be during execution of this function.  */
4481      passed_mode = TYPE_MODE (passed_type);
4482      nominal_mode = TYPE_MODE (nominal_type);
4483
      /* If the parm's mode is VOID, its value doesn't matter,
	 so avoid the usual things like emit_move_insn that could crash.  */
4486      if (nominal_mode == VOIDmode)
4487	{
4488	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4489	  continue;
4490	}
4491
4492      /* If the parm is to be passed as a transparent union, use the
4493	 type of the first field for the tests below.  We have already
4494	 verified that the modes are the same.  */
4495      if (DECL_TRANSPARENT_UNION (parm)
4496	  || TYPE_TRANSPARENT_UNION (passed_type))
4497	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4498
4499      /* See if this arg was passed by invisible reference.  It is if
4500	 it is an object whose size depends on the contents of the
4501	 object itself or if the machine requires these objects be passed
4502	 that way.  */
4503
4504      if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4505	   && contains_placeholder_p (TYPE_SIZE (passed_type)))
4506	  || TREE_ADDRESSABLE (passed_type)
4507#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4508	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4509					      passed_type, named_arg)
4510#endif
4511	  )
4512	{
4513	  passed_type = nominal_type = build_pointer_type (passed_type);
4514	  passed_pointer = 1;
4515	  passed_mode = nominal_mode = Pmode;
4516	}
4517
4518      promoted_mode = passed_mode;
4519
4520#ifdef PROMOTE_FUNCTION_ARGS
      /* Compute the mode to which the arg is actually extended.  */
4522      unsignedp = TREE_UNSIGNED (passed_type);
4523      promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4524#endif
4525
4526      /* Let machine desc say which reg (if any) the parm arrives in.
4527	 0 means it arrives on the stack.  */
4528#ifdef FUNCTION_INCOMING_ARG
4529      entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4530					  passed_type, named_arg);
4531#else
4532      entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4533				 passed_type, named_arg);
4534#endif
4535
4536      if (entry_parm == 0)
4537	promoted_mode = passed_mode;
4538
4539#ifdef SETUP_INCOMING_VARARGS
4540      /* If this is the last named parameter, do any required setup for
4541	 varargs or stdargs.  We need to know about the case of this being an
4542	 addressable type, in which case we skip the registers it
4543	 would have arrived in.
4544
4545	 For stdargs, LAST_NAMED will be set for two parameters, the one that
4546	 is actually the last named, and the dummy parameter.  We only
4547	 want to do this action once.
4548
4549	 Also, indicate when RTL generation is to be suppressed.  */
4550      if (last_named && !varargs_setup)
4551	{
4552	  SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4553				  current_function_pretend_args_size,
4554				  second_time);
4555	  varargs_setup = 1;
4556	}
4557#endif
4558
4559      /* Determine parm's home in the stack,
4560	 in case it arrives in the stack or we should pretend it did.
4561
4562	 Compute the stack position and rtx where the argument arrives
4563	 and its size.
4564
4565	 There is one complexity here:  If this was a parameter that would
4566	 have been passed in registers, but wasn't only because it is
4567	 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4568	 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4569	 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4570	 0 as it was the previous time.  */
4571
4572      pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4573      locate_and_pad_parm (promoted_mode, passed_type,
4574#ifdef STACK_PARMS_IN_REG_PARM_AREA
4575			   1,
4576#else
4577#ifdef FUNCTION_INCOMING_ARG
4578			   FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4579						  passed_type,
4580						  pretend_named) != 0,
4581#else
4582			   FUNCTION_ARG (args_so_far, promoted_mode,
4583					 passed_type,
4584					 pretend_named) != 0,
4585#endif
4586#endif
4587			   fndecl, &stack_args_size, &stack_offset, &arg_size);
4588
4589      if (! second_time)
4590	{
4591	  rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4592
4593	  if (offset_rtx == const0_rtx)
4594	    stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4595	  else
4596	    stack_parm = gen_rtx_MEM (promoted_mode,
4597				      gen_rtx_PLUS (Pmode,
4598						    internal_arg_pointer,
4599						    offset_rtx));
4600
4601	  /* If this is a memory ref that contains aggregate components,
4602	     mark it as such for cse and loop optimize.  Likewise if it
4603	     is readonly.  */
4604	  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4605	  RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4606	  MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4607	}
4608
4609      /* If this parameter was passed both in registers and in the stack,
4610	 use the copy on the stack.  */
4611      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4612	entry_parm = 0;
4613
4614#ifdef FUNCTION_ARG_PARTIAL_NREGS
4615      /* If this parm was passed part in regs and part in memory,
4616	 pretend it arrived entirely in memory
4617	 by pushing the register-part onto the stack.
4618
4619	 In the special case of a DImode or DFmode that is split,
4620	 we could put it together in a pseudoreg directly,
4621	 but for now that's not worth bothering with.  */
4622
4623      if (entry_parm)
4624	{
4625	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4626						  passed_type, named_arg);
4627
4628	  if (nregs > 0)
4629	    {
4630	      current_function_pretend_args_size
4631		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4632		   / (PARM_BOUNDARY / BITS_PER_UNIT)
4633		   * (PARM_BOUNDARY / BITS_PER_UNIT));
4634
4635	      if (! second_time)
4636		{
4637		  /* Handle calls that pass values in multiple non-contiguous
4638		     locations.  The Irix 6 ABI has examples of this.  */
4639		  if (GET_CODE (entry_parm) == PARALLEL)
4640		    emit_group_store (validize_mem (stack_parm), entry_parm,
4641				      int_size_in_bytes (TREE_TYPE (parm)),
4642				      (TYPE_ALIGN (TREE_TYPE (parm))
4643				       / BITS_PER_UNIT));
4644		  else
4645		    move_block_from_reg (REGNO (entry_parm),
4646					 validize_mem (stack_parm), nregs,
4647					 int_size_in_bytes (TREE_TYPE (parm)));
4648		}
4649	      entry_parm = stack_parm;
4650	    }
4651	}
4652#endif
4653
4654      /* If we didn't decide this parm came in a register,
4655	 by default it came on the stack.  */
4656      if (entry_parm == 0)
4657	entry_parm = stack_parm;
4658
4659      /* Record permanently how this parm was passed.  */
4660      if (! second_time)
4661	DECL_INCOMING_RTL (parm) = entry_parm;
4662
4663      /* If there is actually space on the stack for this parm,
4664	 count it in stack_args_size; otherwise set stack_parm to 0
4665	 to indicate there is no preallocated stack slot for the parm.  */
4666
4667      if (entry_parm == stack_parm
4668          || (GET_CODE (entry_parm) == PARALLEL
4669              && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4670#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4671	  /* On some machines, even if a parm value arrives in a register
4672	     there is still an (uninitialized) stack slot allocated for it.
4673
4674	     ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4675	     whether this parameter already has a stack slot allocated,
4676	     because an arg block exists only if current_function_args_size
4677	     is larger than some threshold, and we haven't calculated that
4678	     yet.  So, for now, we just assume that stack slots never exist
4679	     in this case.  */
4680	  || REG_PARM_STACK_SPACE (fndecl) > 0
4681#endif
4682	  )
4683	{
4684	  stack_args_size.constant += arg_size.constant;
4685	  if (arg_size.var)
4686	    ADD_PARM_SIZE (stack_args_size, arg_size.var);
4687	}
4688      else
4689	/* No stack slot was pushed for this parm.  */
4690	stack_parm = 0;
4691
4692      /* Update info on where next arg arrives in registers.  */
4693
4694      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4695			    passed_type, named_arg);
4696
4697      /* If this is our second time through, we are done with this parm.  */
4698      if (second_time)
4699	continue;
4700
4701      /* If we can't trust the parm stack slot to be aligned enough
4702	 for its ultimate type, don't use that slot after entry.
4703	 We'll make another stack slot, if we need one.  */
4704      {
4705	int thisparm_boundary
4706	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4707
4708	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4709	  stack_parm = 0;
4710      }
4711
4712      /* If parm was passed in memory, and we need to convert it on entry,
4713	 don't store it back in that same slot.  */
4714      if (entry_parm != 0
4715	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
4716	stack_parm = 0;
4717
4718#if 0
4719      /* Now adjust STACK_PARM to the mode and precise location
4720	 where this parameter should live during execution,
4721	 if we discover that it must live in the stack during execution.
4722	 To make debuggers happier on big-endian machines, we store
4723	 the value in the last bytes of the space available.  */
4724
4725      if (nominal_mode != BLKmode && nominal_mode != passed_mode
4726	  && stack_parm != 0)
4727	{
4728	  rtx offset_rtx;
4729
4730	  if (BYTES_BIG_ENDIAN
4731	      && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4732	    stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4733				      - GET_MODE_SIZE (nominal_mode));
4734
4735	  offset_rtx = ARGS_SIZE_RTX (stack_offset);
4736	  if (offset_rtx == const0_rtx)
4737	    stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4738	  else
4739	    stack_parm = gen_rtx_MEM (nominal_mode,
4740				      gen_rtx_PLUS (Pmode,
4741						    internal_arg_pointer,
4742						    offset_rtx));
4743
4744	  /* If this is a memory ref that contains aggregate components,
4745	     mark it as such for cse and loop optimize.  */
4746	  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4747	}
4748#endif /* 0 */
4749
4750#ifdef STACK_REGS
      /* We need this "use" info, because the gcc-register->stack-register
	 converter in reg-stack.c needs to know which registers are active
	 at the start of the function call.  The actual parameter loading
	 instructions are not always still available at that point, since
	 they might have been optimized away.  */
4756
4757      if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4758	  emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4759#endif
4760
4761      /* ENTRY_PARM is an RTX for the parameter as it arrives,
4762	 in the mode in which it arrives.
4763	 STACK_PARM is an RTX for a stack slot where the parameter can live
4764	 during the function (in case we want to put it there).
4765	 STACK_PARM is 0 if no stack slot was pushed for it.
4766
4767	 Now output code if necessary to convert ENTRY_PARM to
4768	 the type in which this function declares it,
4769	 and store that result in an appropriate place,
4770	 which may be a pseudo reg, may be STACK_PARM,
4771	 or may be a local stack slot if STACK_PARM is 0.
4772
4773	 Set DECL_RTL to that place.  */
4774
4775      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4776	{
4777	  /* If a BLKmode arrives in registers, copy it to a stack slot.
4778	     Handle calls that pass values in multiple non-contiguous
4779	     locations.  The Irix 6 ABI has examples of this.  */
4780	  if (GET_CODE (entry_parm) == REG
4781	      || GET_CODE (entry_parm) == PARALLEL)
4782	    {
4783	      int size_stored
4784		= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4785			      UNITS_PER_WORD);
4786
	      /* Note that we will be storing an integral number of words.
		 So we have to be careful to ensure that we allocate an
		 integral number of words.  We do that below, in the call to
		 assign_stack_local, if space was not allocated in the
		 argument list.  If it was, this will not work if
		 PARM_BOUNDARY is not a multiple of BITS_PER_WORD.  It isn't
		 clear how to fix this if it becomes a problem.  */
4794
4795	      if (stack_parm == 0)
4796		{
4797		  stack_parm
4798		    = assign_stack_local (GET_MODE (entry_parm),
4799					  size_stored, 0);
4800
4801		  /* If this is a memory ref that contains aggregate
4802		     components, mark it as such for cse and loop optimize.  */
4803		  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4804		}
4805
4806	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4807		abort ();
4808
4809	      if (TREE_READONLY (parm))
4810		RTX_UNCHANGING_P (stack_parm) = 1;
4811
4812	      /* Handle calls that pass values in multiple non-contiguous
4813		 locations.  The Irix 6 ABI has examples of this.  */
4814	      if (GET_CODE (entry_parm) == PARALLEL)
4815		emit_group_store (validize_mem (stack_parm), entry_parm,
4816				  int_size_in_bytes (TREE_TYPE (parm)),
4817				  (TYPE_ALIGN (TREE_TYPE (parm))
4818				   / BITS_PER_UNIT));
4819	      else
4820		move_block_from_reg (REGNO (entry_parm),
4821				     validize_mem (stack_parm),
4822				     size_stored / UNITS_PER_WORD,
4823				     int_size_in_bytes (TREE_TYPE (parm)));
4824	    }
4825	  DECL_RTL (parm) = stack_parm;
4826	}
4827      else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4828		   && ! DECL_INLINE (fndecl))
4829		  /* layout_decl may set this.  */
4830		  || TREE_ADDRESSABLE (parm)
4831		  || TREE_SIDE_EFFECTS (parm)
4832		  /* If -ffloat-store specified, don't put explicit
4833		     float variables into registers.  */
4834		  || (flag_float_store
4835		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4836	       /* Always assign pseudo to structure return or item passed
4837		  by invisible reference.  */
4838	       || passed_pointer || parm == function_result_decl)
4839	{
4840	  /* Store the parm in a pseudoregister during the function, but we
4841	     may need to do it in a wider mode.  */
4842
4843	  register rtx parmreg;
4844	  int regno, regnoi = 0, regnor = 0;
4845
4846	  unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4847
4848	  promoted_nominal_mode
4849	    = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4850
4851	  parmreg = gen_reg_rtx (promoted_nominal_mode);
4852	  mark_user_reg (parmreg);
4853
4854	  /* If this was an item that we received a pointer to, set DECL_RTL
4855	     appropriately.  */
4856	  if (passed_pointer)
4857	    {
4858	      DECL_RTL (parm)
4859		= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4860	      MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4861	    }
4862	  else
4863	    DECL_RTL (parm) = parmreg;
4864
4865	  /* Copy the value into the register.  */
4866	  if (nominal_mode != passed_mode
4867	      || promoted_nominal_mode != promoted_mode)
4868	    {
4869	      int save_tree_used;
4870	      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4871		 mode, by the caller.  We now have to convert it to
4872		 NOMINAL_MODE, if different.  However, PARMREG may be in
4873		 a different mode than NOMINAL_MODE if it is being stored
4874		 promoted.
4875
4876		 If ENTRY_PARM is a hard register, it might be in a register
4877		 not valid for operating in its mode (e.g., an odd-numbered
4878		 register for a DFmode).  In that case, moves are the only
4879		 thing valid, so we can't do a convert from there.  This
		 occurs when the calling sequence allows such misaligned
		 usage.
4882
4883		 In addition, the conversion may involve a call, which could
4884		 clobber parameters which haven't been copied to pseudo
4885		 registers yet.  Therefore, we must first copy the parm to
4886		 a pseudo reg here, and save the conversion until after all
4887		 parameters have been moved.  */
4888
4889	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4890
4891	      emit_move_insn (tempreg, validize_mem (entry_parm));
4892
4893	      push_to_sequence (conversion_insns);
4894	      tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4895
4896	      /* TREE_USED gets set erroneously during expand_assignment.  */
4897	      save_tree_used = TREE_USED (parm);
4898	      expand_assignment (parm,
4899				 make_tree (nominal_type, tempreg), 0, 0);
4900	      TREE_USED (parm) = save_tree_used;
4901	      conversion_insns = get_insns ();
4902	      did_conversion = 1;
4903	      end_sequence ();
4904	    }
4905	  else
4906	    emit_move_insn (parmreg, validize_mem (entry_parm));
4907
4908	  /* If we were passed a pointer but the actual value
4909	     can safely live in a register, put it in one.  */
4910	  if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4911	      && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4912		     && ! DECL_INLINE (fndecl))
4913		    /* layout_decl may set this.  */
4914		    || TREE_ADDRESSABLE (parm)
4915		    || TREE_SIDE_EFFECTS (parm)
4916		    /* If -ffloat-store specified, don't put explicit
4917		       float variables into registers.  */
4918		    || (flag_float_store
4919			&& TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4920	    {
4921	      /* We can't use nominal_mode, because it will have been set to
4922		 Pmode above.  We must use the actual mode of the parm.  */
4923	      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4924	      mark_user_reg (parmreg);
4925	      emit_move_insn (parmreg, DECL_RTL (parm));
4926	      DECL_RTL (parm) = parmreg;
4927	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
4928		 now the parm.  */
4929	      stack_parm = 0;
4930	    }
4931#ifdef FUNCTION_ARG_CALLEE_COPIES
4932	  /* If we are passed an arg by reference and it is our responsibility
4933	     to make a copy, do it now.
	     PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4935	     original argument, so we must recreate them in the call to
4936	     FUNCTION_ARG_CALLEE_COPIES.  */
	  /* ??? Later, add code to avoid making the copy when the
	     argument isn't modified.  */
4939
4940	  else if (passed_pointer
4941		   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4942						  TYPE_MODE (DECL_ARG_TYPE (parm)),
4943						  DECL_ARG_TYPE (parm),
4944						  named_arg)
4945		   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4946	    {
4947	      rtx copy;
4948	      tree type = DECL_ARG_TYPE (parm);
4949
	      /* This sequence may involve a library call that could clobber
		 registers which haven't been copied to pseudos yet.  */
4952
4953	      push_to_sequence (conversion_insns);
4954
4955	      if (TYPE_SIZE (type) == 0
4956		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4957		/* This is a variable sized object.  */
4958		copy = gen_rtx_MEM (BLKmode,
4959				    allocate_dynamic_stack_space
4960				    (expr_size (parm), NULL_RTX,
4961				     TYPE_ALIGN (type)));
4962	      else
4963		copy = assign_stack_temp (TYPE_MODE (type),
4964					  int_size_in_bytes (type), 1);
4965	      MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4966	      RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4967
4968	      store_expr (parm, copy, 0);
4969	      emit_move_insn (parmreg, XEXP (copy, 0));
4970	      if (current_function_check_memory_usage)
4971		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4972				   XEXP (copy, 0), Pmode,
4973				   GEN_INT (int_size_in_bytes (type)),
4974				   TYPE_MODE (sizetype),
4975				   GEN_INT (MEMORY_USE_RW),
4976				   TYPE_MODE (integer_type_node));
4977	      conversion_insns = get_insns ();
4978	      did_conversion = 1;
4979	      end_sequence ();
4980	    }
4981#endif /* FUNCTION_ARG_CALLEE_COPIES */
4982
4983	  /* In any case, record the parm's desired stack location
4984	     in case we later discover it must live in the stack.
4985
4986	     If it is a COMPLEX value, store the stack location for both
4987	     halves.  */
4988
4989	  if (GET_CODE (parmreg) == CONCAT)
4990	    regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4991	  else
4992	    regno = REGNO (parmreg);
4993
4994	  if (regno >= max_parm_reg)
4995	    {
4996	      rtx *new;
4997	      int old_max_parm_reg = max_parm_reg;
4998
4999	      /* It's slow to expand this one register at a time,
5000		 but it's also rare and we need max_parm_reg to be
5001		 precisely correct.  */
5002	      max_parm_reg = regno + 1;
5003	      new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
5004	      bcopy ((char *) parm_reg_stack_loc, (char *) new,
5005		     old_max_parm_reg * sizeof (rtx));
5006	      bzero ((char *) (new + old_max_parm_reg),
5007		     (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
5008	      parm_reg_stack_loc = new;
5009	    }
5010
5011	  if (GET_CODE (parmreg) == CONCAT)
5012	    {
5013	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
5014
5015	      regnor = REGNO (gen_realpart (submode, parmreg));
5016	      regnoi = REGNO (gen_imagpart (submode, parmreg));
5017
5018	      if (stack_parm != 0)
5019		{
5020		  parm_reg_stack_loc[regnor]
5021		    = gen_realpart (submode, stack_parm);
5022		  parm_reg_stack_loc[regnoi]
5023		    = gen_imagpart (submode, stack_parm);
5024		}
5025	      else
5026		{
5027		  parm_reg_stack_loc[regnor] = 0;
5028		  parm_reg_stack_loc[regnoi] = 0;
5029		}
5030	    }
5031	  else
5032	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5033
5034	  /* Mark the register as eliminable if we did no conversion
5035	     and it was copied from memory at a fixed offset,
5036	     and the arg pointer was not copied to a pseudo-reg.
5037	     If the arg pointer is a pseudo reg or the offset formed
5038	     an invalid address, such memory-equivalences
5039	     as we make here would screw up life analysis for it.  */
5040	  if (nominal_mode == passed_mode
5041	      && ! did_conversion
5042	      && stack_parm != 0
5043	      && GET_CODE (stack_parm) == MEM
5044	      && stack_offset.var == 0
5045	      && reg_mentioned_p (virtual_incoming_args_rtx,
5046				  XEXP (stack_parm, 0)))
5047	    {
5048	      rtx linsn = get_last_insn ();
5049	      rtx sinsn, set;
5050
5051	      /* Mark complex types separately.  */
5052	      if (GET_CODE (parmreg) == CONCAT)
5053		/* Scan backwards for the set of the real and
5054		   imaginary parts.  */
5055		for (sinsn = linsn; sinsn != 0;
5056		     sinsn = prev_nonnote_insn (sinsn))
5057		  {
5058		    set = single_set (sinsn);
5059		    if (set != 0
5060			&& SET_DEST (set) == regno_reg_rtx [regnoi])
5061		      REG_NOTES (sinsn)
5062			= gen_rtx_EXPR_LIST (REG_EQUIV,
5063					     parm_reg_stack_loc[regnoi],
5064					     REG_NOTES (sinsn));
5065		    else if (set != 0
5066			     && SET_DEST (set) == regno_reg_rtx [regnor])
5067		      REG_NOTES (sinsn)
5068			= gen_rtx_EXPR_LIST (REG_EQUIV,
5069					     parm_reg_stack_loc[regnor],
5070					     REG_NOTES (sinsn));
5071		  }
5072	      else if ((set = single_set (linsn)) != 0
5073		       && SET_DEST (set) == parmreg)
5074	        REG_NOTES (linsn)
5075		  = gen_rtx_EXPR_LIST (REG_EQUIV,
5076				       stack_parm, REG_NOTES (linsn));
5077	    }
5078
	  /* For a pointer data type, suggest a pointer register.  */
5080	  if (POINTER_TYPE_P (TREE_TYPE (parm)))
5081	    mark_reg_pointer (parmreg,
5082			      (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
5083			       / BITS_PER_UNIT));
5084	}
5085      else
5086	{
5087	  /* Value must be stored in the stack slot STACK_PARM
5088	     during function execution.  */
5089
5090	  if (promoted_mode != nominal_mode)
5091	    {
5092	      /* Conversion is required.   */
5093	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5094
5095	      emit_move_insn (tempreg, validize_mem (entry_parm));
5096
5097	      push_to_sequence (conversion_insns);
5098	      entry_parm = convert_to_mode (nominal_mode, tempreg,
5099					    TREE_UNSIGNED (TREE_TYPE (parm)));
5100	      if (stack_parm)
5101		{
5102		  /* ??? This may need a big-endian conversion on sparc64.  */
5103		  stack_parm = change_address (stack_parm, nominal_mode,
5104					       NULL_RTX);
5105		}
5106	      conversion_insns = get_insns ();
5107	      did_conversion = 1;
5108	      end_sequence ();
5109	    }
5110
5111	  if (entry_parm != stack_parm)
5112	    {
5113	      if (stack_parm == 0)
5114		{
5115		  stack_parm
5116		    = assign_stack_local (GET_MODE (entry_parm),
5117					  GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5118		  /* If this is a memory ref that contains aggregate components,
5119		     mark it as such for cse and loop optimize.  */
5120		  MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
5121		}
5122
5123	      if (promoted_mode != nominal_mode)
5124		{
5125		  push_to_sequence (conversion_insns);
5126		  emit_move_insn (validize_mem (stack_parm),
5127				  validize_mem (entry_parm));
5128		  conversion_insns = get_insns ();
5129		  end_sequence ();
5130		}
5131	      else
5132		emit_move_insn (validize_mem (stack_parm),
5133				validize_mem (entry_parm));
5134	    }
5135	  if (current_function_check_memory_usage)
5136	    {
5137	      push_to_sequence (conversion_insns);
5138	      emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
5139				 XEXP (stack_parm, 0), Pmode,
5140				 GEN_INT (GET_MODE_SIZE (GET_MODE
5141							 (entry_parm))),
5142				 TYPE_MODE (sizetype),
5143				 GEN_INT (MEMORY_USE_RW),
5144				 TYPE_MODE (integer_type_node));
5145
5146	      conversion_insns = get_insns ();
5147	      end_sequence ();
5148	    }
5149	  DECL_RTL (parm) = stack_parm;
5150	}
5151
5152      /* If this "parameter" was the place where we are receiving the
5153	 function's incoming structure pointer, set up the result.  */
5154      if (parm == function_result_decl)
5155	{
5156	  tree result = DECL_RESULT (fndecl);
5157	  tree restype = TREE_TYPE (result);
5158
5159	  DECL_RTL (result)
5160	    = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
5161
5162	  MEM_SET_IN_STRUCT_P (DECL_RTL (result),
5163			       AGGREGATE_TYPE_P (restype));
5164	}
5165
5166      if (TREE_THIS_VOLATILE (parm))
5167	MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
5168      if (TREE_READONLY (parm))
5169	RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
5170    }
5171
5172  /* Output all parameter conversion instructions (possibly including calls)
5173     now that all parameters have been copied out of hard registers.  */
5174  emit_insns (conversion_insns);
5175
5176  last_parm_insn = get_last_insn ();
5177
5178  current_function_args_size = stack_args_size.constant;
5179
5180  /* Adjust function incoming argument size for alignment and
5181     minimum length.  */
5182
5183#ifdef REG_PARM_STACK_SPACE
5184#ifndef MAYBE_REG_PARM_STACK_SPACE
5185  current_function_args_size = MAX (current_function_args_size,
5186				    REG_PARM_STACK_SPACE (fndecl));
5187#endif
5188#endif
5189
5190#ifdef STACK_BOUNDARY
5191#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5192
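  /* As a hypothetical illustration: with STACK_BOUNDARY == 64,
     STACK_BYTES is 8, so an args size of 13 bytes is rounded up to 16
     by the computation below.  */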
5193  current_function_args_size
5194    = ((current_function_args_size + STACK_BYTES - 1)
5195       / STACK_BYTES) * STACK_BYTES;
5196#endif
5197
5198#ifdef ARGS_GROW_DOWNWARD
5199  current_function_arg_offset_rtx
5200    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5201       : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
5202				  size_int (-stack_args_size.constant)),
5203		      NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5204#else
5205  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5206#endif
5207
5208  /* See how many bytes, if any, of its args a function should try to pop
5209     on return.  */
5210
5211  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5212						 current_function_args_size);
5213
  /* For a stdarg.h function, save info about
     the regs and stack space used by the named args.  */
5216
5217  if (!hide_last_arg)
5218    current_function_args_info = args_so_far;
5219
5220  /* Set the rtx used for the function return value.  Put this in its
5221     own variable so any optimizers that need this information don't have
5222     to include tree.h.  Do this here so it gets done when an inlined
5223     function gets output.  */
5224
5225  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5226}
5227
5228/* Indicate whether REGNO is an incoming argument to the current function
5229   that was promoted to a wider mode.  If so, return the RTX for the
5230   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
5231   that REGNO is promoted from and whether the promotion was signed or
5232   unsigned.  */
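
/* As a hypothetical example: on a target whose PROMOTE_MODE widens
   QImode arguments to SImode, a char argument arriving in an SImode
   register REGNO makes this function return that register's rtx, with
   *PMODE set to QImode and *PUNSIGNEDP set from the argument's
   signedness.  */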
5233
5234#ifdef PROMOTE_FUNCTION_ARGS
5235
5236rtx
5237promoted_input_arg (regno, pmode, punsignedp)
5238     int regno;
5239     enum machine_mode *pmode;
5240     int *punsignedp;
5241{
5242  tree arg;
5243
5244  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5245       arg = TREE_CHAIN (arg))
5246    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5247	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
5248	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5249      {
5250	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5251	int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5252
5253	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5254	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5255	    && mode != DECL_MODE (arg))
5256	  {
5257	    *pmode = DECL_MODE (arg);
5258	    *punsignedp = unsignedp;
5259	    return DECL_INCOMING_RTL (arg);
5260	  }
5261      }
5262
5263  return 0;
5264}
5265
5266#endif
5267
5268/* Compute the size and offset from the start of the stacked arguments for a
5269   parm passed in mode PASSED_MODE and with type TYPE.
5270
5271   INITIAL_OFFSET_PTR points to the current offset into the stacked
5272   arguments.
5273
5274   The starting offset and size for this parm are returned in *OFFSET_PTR
5275   and *ARG_SIZE_PTR, respectively.
5276
5277   IN_REGS is non-zero if the argument will be passed in registers.  It will
5278   never be set if REG_PARM_STACK_SPACE is not defined.
5279
5280   FNDECL is the function in which the argument was defined.
5281
5282   There are two types of rounding that are done.  The first, controlled by
5283   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5284   list to be aligned to the specific boundary (in bits).  This rounding
5285   affects the initial and starting offsets, but not the argument size.
5286
5287   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5288   optionally rounds the size of the parm to PARM_BOUNDARY.  The
5289   initial offset is not affected by this rounding, while the size always
5290   is and the starting offset may be.  */
5291
/* OFFSET_PTR will be negative in the ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of the args so far as
   INITIAL_OFFSET_PTR.  ARG_SIZE_PTR is always positive.  */
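
/* As a worked illustration of the non-ARGS_GROW_DOWNWARD path (under
   assumed values PARM_BOUNDARY == 32, FUNCTION_ARG_BOUNDARY == 32,
   upward padding, and no PUSH_ROUNDING or REG_PARM_STACK_SPACE): a
   2-byte HImode parm arriving when *INITIAL_OFFSET_PTR is 6 is first
   aligned, giving *OFFSET_PTR == 8; its size is then rounded from 2 up
   to 4, so *ARG_SIZE_PTR == 4 and the next parm would start at 12.  */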
5296
5297void
5298locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5299		     initial_offset_ptr, offset_ptr, arg_size_ptr)
5300     enum machine_mode passed_mode;
5301     tree type;
5302     int in_regs;
5303     tree fndecl ATTRIBUTE_UNUSED;
5304     struct args_size *initial_offset_ptr;
5305     struct args_size *offset_ptr;
5306     struct args_size *arg_size_ptr;
5307{
5308  tree sizetree
5309    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5310  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5311  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5312
5313#ifdef REG_PARM_STACK_SPACE
5314  /* If we have found a stack parm before we reach the end of the
5315     area reserved for registers, skip that area.  */
5316  if (! in_regs)
5317    {
5318      int reg_parm_stack_space = 0;
5319
5320#ifdef MAYBE_REG_PARM_STACK_SPACE
5321      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5322#else
5323      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5324#endif
5325      if (reg_parm_stack_space > 0)
5326	{
5327	  if (initial_offset_ptr->var)
5328	    {
5329	      initial_offset_ptr->var
5330		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5331			      size_int (reg_parm_stack_space));
5332	      initial_offset_ptr->constant = 0;
5333	    }
5334	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
5335	    initial_offset_ptr->constant = reg_parm_stack_space;
5336	}
5337    }
5338#endif /* REG_PARM_STACK_SPACE */
5339
5340  arg_size_ptr->var = 0;
5341  arg_size_ptr->constant = 0;
5342
5343#ifdef ARGS_GROW_DOWNWARD
5344  if (initial_offset_ptr->var)
5345    {
5346      offset_ptr->constant = 0;
5347      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
5348				    initial_offset_ptr->var);
5349    }
5350  else
5351    {
5352      offset_ptr->constant = - initial_offset_ptr->constant;
5353      offset_ptr->var = 0;
5354    }
5355  if (where_pad != none
5356      && (TREE_CODE (sizetree) != INTEGER_CST
5357	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5358    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5359  SUB_PARM_SIZE (*offset_ptr, sizetree);
5360  if (where_pad != downward)
5361    pad_to_arg_alignment (offset_ptr, boundary);
5362  if (initial_offset_ptr->var)
5363    {
5364      arg_size_ptr->var = size_binop (MINUS_EXPR,
5365				      size_binop (MINUS_EXPR,
5366						  integer_zero_node,
5367						  initial_offset_ptr->var),
5368				      offset_ptr->var);
5369    }
5370  else
5371    {
5372      arg_size_ptr->constant = (- initial_offset_ptr->constant
5373				- offset_ptr->constant);
5374    }
5375#else /* !ARGS_GROW_DOWNWARD */
5376  if (!in_regs
5377#ifdef REG_PARM_STACK_SPACE
5378      || REG_PARM_STACK_SPACE (fndecl) > 0
5379#else
5380      /* For the gcc-2_95-branch we want to make sure not to break something
         on platforms which pass arguments in registers but don't define
5382         REG_PARM_STACK_SPACE. So we force the original behaviour here.  */
5383      || 1
5384#endif
5385      )
5386  pad_to_arg_alignment (initial_offset_ptr, boundary);
5387
5388  *offset_ptr = *initial_offset_ptr;
5389
5390#ifdef PUSH_ROUNDING
5391  if (passed_mode != BLKmode)
5392    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5393#endif
5394
  /* pad_below needs the pre-rounded size to know how much to pad below,
     so this must be done before rounding up.  */
5397  if (where_pad == downward
5398    /* However, BLKmode args passed in regs have their padding done elsewhere.
5399       The stack slot must be able to hold the entire register.  */
5400      && !(in_regs && passed_mode == BLKmode))
5401    pad_below (offset_ptr, passed_mode, sizetree);
5402
5403  if (where_pad != none
5404      && (TREE_CODE (sizetree) != INTEGER_CST
5405	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5406    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5407
5408  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5409#endif /* ARGS_GROW_DOWNWARD */
5410}
5411
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY
   (or down, in the ARGS_GROW_DOWNWARD case, where offsets are negative).
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
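/* For instance, with a (hypothetical) BOUNDARY of 64 bits,
   BOUNDARY_IN_BYTES is 8: a constant offset of 20 becomes
   CEIL_ROUND (20, 8) == 24, while in the ARGS_GROW_DOWNWARD case an
   offset of -20 becomes FLOOR_ROUND (-20, 8) == -24.  */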
5414
5415static void
5416pad_to_arg_alignment (offset_ptr, boundary)
5417     struct args_size *offset_ptr;
5418     int boundary;
5419{
5420  int boundary_in_bytes = boundary / BITS_PER_UNIT;
5421
5422  if (boundary > BITS_PER_UNIT)
5423    {
5424      if (offset_ptr->var)
5425	{
5426	  offset_ptr->var  =
5427#ifdef ARGS_GROW_DOWNWARD
5428	    round_down
5429#else
5430	    round_up
5431#endif
5432	      (ARGS_SIZE_TREE (*offset_ptr),
5433	       boundary / BITS_PER_UNIT);
5434	  offset_ptr->constant = 0; /*?*/
5435	}
5436      else
5437	offset_ptr->constant =
5438#ifdef ARGS_GROW_DOWNWARD
5439	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5440#else
5441	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5442#endif
5443    }
5444}
5445
5446#ifndef ARGS_GROW_DOWNWARD
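/* Adjust OFFSET_PTR to account for padding below a downward-padded parm:
   advance the offset by the difference between the parm's size rounded
   up to PARM_BOUNDARY and its actual size, so that the value itself ends
   up at the top of its slot.  For example, a 2-byte HImode parm with
   PARM_BOUNDARY == 32 advances the offset by 2.  */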
5447static void
5448pad_below (offset_ptr, passed_mode, sizetree)
5449     struct args_size *offset_ptr;
5450     enum machine_mode passed_mode;
5451     tree sizetree;
5452{
5453  if (passed_mode != BLKmode)
5454    {
5455      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5456	offset_ptr->constant
5457	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5458	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5459	      - GET_MODE_SIZE (passed_mode));
5460    }
5461  else
5462    {
5463      if (TREE_CODE (sizetree) != INTEGER_CST
5464	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5465	{
5466	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
5467	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5468	  /* Add it in.  */
5469	  ADD_PARM_SIZE (*offset_ptr, s2);
5470	  SUB_PARM_SIZE (*offset_ptr, sizetree);
5471	}
5472    }
5473}
5474#endif
5475
5476#ifdef ARGS_GROW_DOWNWARD
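/* Round the tree VALUE down to a multiple of DIVISOR, i.e. compute
   (VALUE / DIVISOR) * DIVISOR using floor division.  */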
5477static tree
5478round_down (value, divisor)
5479     tree value;
5480     int divisor;
5481{
5482  return size_binop (MULT_EXPR,
5483		     size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5484		     size_int (divisor));
5485}
5486#endif
5487
5488/* Walk the tree of blocks describing the binding levels within a function
5489   and warn about uninitialized variables.
5490   This is done after calling flow_analysis and before global_alloc
   replaces the pseudo-regs with hard regs.  */
5492
5493void
5494uninitialized_vars_warning (block)
5495     tree block;
5496{
5497  register tree decl, sub;
5498  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5499    {
5500      if (TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
5502	     because assigning the fields one by one can fail to convince
5503	     flow.c that the entire aggregate was initialized.
5504	     Unions are troublesome because members may be shorter.  */
5505	  && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5506	  && DECL_RTL (decl) != 0
5507	  && GET_CODE (DECL_RTL (decl)) == REG
5508	  /* Global optimizations can make it difficult to determine if a
5509	     particular variable has been initialized.  However, a VAR_DECL
5510	     with a nonzero DECL_INITIAL had an initializer, so do not
5511	     claim it is potentially uninitialized.
5512
5513	     We do not care about the actual value in DECL_INITIAL, so we do
5514	     not worry that it may be a dangling pointer.  */
5515	  && DECL_INITIAL (decl) == NULL_TREE
5516	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
5517	warning_with_decl (decl,
5518			   "`%s' might be used uninitialized in this function");
5519      if (TREE_CODE (decl) == VAR_DECL
5520	  && DECL_RTL (decl) != 0
5521	  && GET_CODE (DECL_RTL (decl)) == REG
5522	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5523	warning_with_decl (decl,
5524			   "variable `%s' might be clobbered by `longjmp' or `vfork'");
5525    }
5526  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5527    uninitialized_vars_warning (sub);
5528}
5529
5530/* Do the appropriate part of uninitialized_vars_warning
5531   but for arguments instead of local variables.  */
5532
5533void
5534setjmp_args_warning ()
5535{
5536  register tree decl;
5537  for (decl = DECL_ARGUMENTS (current_function_decl);
5538       decl; decl = TREE_CHAIN (decl))
5539    if (DECL_RTL (decl) != 0
5540	&& GET_CODE (DECL_RTL (decl)) == REG
5541	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5542      warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5543}
5544
/* If this function calls setjmp, put all vars into the stack
5546   unless they were declared `register'.  */
5547
5548void
5549setjmp_protect (block)
5550     tree block;
5551{
5552  register tree decl, sub;
5553  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5554    if ((TREE_CODE (decl) == VAR_DECL
5555	 || TREE_CODE (decl) == PARM_DECL)
5556	&& DECL_RTL (decl) != 0
5557	&& (GET_CODE (DECL_RTL (decl)) == REG
5558	    || (GET_CODE (DECL_RTL (decl)) == MEM
5559		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5560	/* If this variable came from an inline function, it must be
5561	   that its life doesn't overlap the setjmp.  If there was a
5562	   setjmp in the function, it would already be in memory.  We
	   must exclude such variables because their DECL_RTL might be
5564	   set to strange things such as virtual_stack_vars_rtx.  */
5565	&& ! DECL_FROM_INLINE (decl)
5566	&& (
5567#ifdef NON_SAVING_SETJMP
5568	    /* If longjmp doesn't restore the registers,
5569	       don't put anything in them.  */
5570	    NON_SAVING_SETJMP
5571	    ||
5572#endif
5573	    ! DECL_REGISTER (decl)))
5574      put_var_into_stack (decl);
5575  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5576    setjmp_protect (sub);
5577}
5578
5579/* Like the previous function, but for args instead of local variables.  */
5580
5581void
5582setjmp_protect_args ()
5583{
5584  register tree decl;
5585  for (decl = DECL_ARGUMENTS (current_function_decl);
5586       decl; decl = TREE_CHAIN (decl))
5587    if ((TREE_CODE (decl) == VAR_DECL
5588	 || TREE_CODE (decl) == PARM_DECL)
5589	&& DECL_RTL (decl) != 0
5590	&& (GET_CODE (DECL_RTL (decl)) == REG
5591	    || (GET_CODE (DECL_RTL (decl)) == MEM
5592		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5593	&& (
5594	    /* If longjmp doesn't restore the registers,
5595	       don't put anything in them.  */
5596#ifdef NON_SAVING_SETJMP
5597	    NON_SAVING_SETJMP
5598	    ||
5599#endif
5600	    ! DECL_REGISTER (decl)))
5601      put_var_into_stack (decl);
5602}
5603
5604/* Return the context-pointer register corresponding to DECL,
5605   or 0 if it does not need one.  */
5606
5607rtx
5608lookup_static_chain (decl)
5609     tree decl;
5610{
5611  tree context = decl_function_context (decl);
5612  tree link;
5613
5614  if (context == 0
5615      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5616    return 0;
5617
5618  /* We treat inline_function_decl as an alias for the current function
5619     because that is the inline function whose vars, types, etc.
5620     are being merged into the current function.
5621     See expand_inline_function.  */
5622  if (context == current_function_decl || context == inline_function_decl)
5623    return virtual_stack_vars_rtx;
5624
5625  for (link = context_display; link; link = TREE_CHAIN (link))
5626    if (TREE_PURPOSE (link) == context)
5627      return RTL_EXPR_RTL (TREE_VALUE (link));
5628
5629  abort ();
5630}
5631
5632/* Convert a stack slot address ADDR for variable VAR
5633   (from a containing function)
5634   into an address valid in this function (using a static chain).  */
5635
5636rtx
5637fix_lexical_addr (addr, var)
5638     rtx addr;
5639     tree var;
5640{
5641  rtx basereg;
5642  HOST_WIDE_INT displacement;
5643  tree context = decl_function_context (var);
5644  struct function *fp;
5645  rtx base = 0;
5646
5647  /* If this is the present function, we need not do anything.  */
5648  if (context == current_function_decl || context == inline_function_decl)
5649    return addr;
5650
5651  for (fp = outer_function_chain; fp; fp = fp->next)
5652    if (fp->decl == context)
5653      break;
5654
5655  if (fp == 0)
5656    abort ();
5657
5658  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5659    addr = XEXP (XEXP (addr, 0), 0);
5660
5661  /* Decode given address as base reg plus displacement.  */
5662  if (GET_CODE (addr) == REG)
5663    basereg = addr, displacement = 0;
5664  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5665    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5666  else
5667    abort ();
5668
5669  /* We accept vars reached via the containing function's
5670     incoming arg pointer and via its stack variables pointer.  */
5671  if (basereg == fp->internal_arg_pointer)
5672    {
5673      /* If reached via arg pointer, get the arg pointer value
5674	 out of that function's stack frame.
5675
5676	 There are two cases:  If a separate ap is needed, allocate a
5677	 slot in the outer function for it and dereference it that way.
5678	 This is correct even if the real ap is actually a pseudo.
5679	 Otherwise, just adjust the offset from the frame pointer to
5680	 compensate.  */
5681
5682#ifdef NEED_SEPARATE_AP
5683      rtx addr;
5684
5685      if (fp->arg_pointer_save_area == 0)
5686	fp->arg_pointer_save_area
5687	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5688
5689      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5690      addr = memory_address (Pmode, addr);
5691
5692      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5693#else
5694      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5695      base = lookup_static_chain (var);
5696#endif
5697    }
5698
5699  else if (basereg == virtual_stack_vars_rtx)
5700    {
5701      /* This is the same code as lookup_static_chain, duplicated here to
5702	 avoid an extra call to decl_function_context.  */
5703      tree link;
5704
5705      for (link = context_display; link; link = TREE_CHAIN (link))
5706	if (TREE_PURPOSE (link) == context)
5707	  {
5708	    base = RTL_EXPR_RTL (TREE_VALUE (link));
5709	    break;
5710	  }
5711    }
5712
5713  if (base == 0)
5714    abort ();
5715
5716  /* Use same offset, relative to appropriate static chain or argument
5717     pointer.  */
5718  return plus_constant (base, displacement);
5719}
5720
5721/* Return the address of the trampoline for entering nested fn FUNCTION.
5722   If necessary, allocate a trampoline (in the stack frame)
5723   and emit rtl to initialize its contents (at entry to this function).  */
5724
5725rtx
5726trampoline_address (function)
5727     tree function;
5728{
5729  tree link;
5730  tree rtlexp;
5731  rtx tramp;
5732  struct function *fp;
5733  tree fn_context;
5734
5735  /* Find an existing trampoline and return it.  */
5736  for (link = trampoline_list; link; link = TREE_CHAIN (link))
5737    if (TREE_PURPOSE (link) == function)
5738      return
5739	round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5740
5741  for (fp = outer_function_chain; fp; fp = fp->next)
5742    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5743      if (TREE_PURPOSE (link) == function)
5744	{
5745	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5746				    function);
5747	  return round_trampoline_addr (tramp);
5748	}
5749
5750  /* None exists; we must make one.  */
5751
5752  /* Find the `struct function' for the function containing FUNCTION.  */
5753  fp = 0;
5754  fn_context = decl_function_context (function);
5755  if (fn_context != current_function_decl
5756      && fn_context != inline_function_decl)
5757    for (fp = outer_function_chain; fp; fp = fp->next)
5758      if (fp->decl == fn_context)
5759	break;
5760
5761  /* Allocate run-time space for this trampoline
5762     (usually in the defining function's stack frame).  */
5763#ifdef ALLOCATE_TRAMPOLINE
5764  tramp = ALLOCATE_TRAMPOLINE (fp);
5765#else
5766  /* If rounding needed, allocate extra space
5767     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
5768#ifdef TRAMPOLINE_ALIGNMENT
5769#define TRAMPOLINE_REAL_SIZE \
5770  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5771#else
5772#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5773#endif
5774  if (fp != 0)
5775    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5776  else
5777    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5778#endif
5779
5780  /* Record the trampoline for reuse and note it for later initialization
5781     by expand_function_end.  */
5782  if (fp != 0)
5783    {
5784      push_obstacks (fp->function_maybepermanent_obstack,
5785		     fp->function_maybepermanent_obstack);
5786      rtlexp = make_node (RTL_EXPR);
5787      RTL_EXPR_RTL (rtlexp) = tramp;
5788      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5789      pop_obstacks ();
5790    }
5791  else
5792    {
5793      /* Make the RTL_EXPR node temporary, not momentary, so that the
5794	 trampoline_list doesn't become garbage.  */
5795      int momentary = suspend_momentary ();
5796      rtlexp = make_node (RTL_EXPR);
5797      resume_momentary (momentary);
5798
5799      RTL_EXPR_RTL (rtlexp) = tramp;
5800      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5801    }
5802
5803  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5804  return round_trampoline_addr (tramp);
5805}
5806
/* Given a trampoline address,
   round it up to a multiple of TRAMPOLINE_ALIGNMENT.  */
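/* E.g., with a (hypothetical) TRAMPOLINE_ALIGNMENT of 64 bits, an
   address of 0x1005 has 7 added and is then masked with -8, yielding
   0x1008.  */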
5809
5810static rtx
5811round_trampoline_addr (tramp)
5812     rtx tramp;
5813{
5814#ifdef TRAMPOLINE_ALIGNMENT
5815  /* Round address up to desired boundary.  */
5816  rtx temp = gen_reg_rtx (Pmode);
5817  temp = expand_binop (Pmode, add_optab, tramp,
5818		       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5819		       temp, 0, OPTAB_LIB_WIDEN);
5820  tramp = expand_binop (Pmode, and_optab, temp,
5821			GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5822			temp, 0, OPTAB_LIB_WIDEN);
5823#endif
5824  return tramp;
5825}
5826
5827/* The functions identify_blocks and reorder_blocks provide a way to
5828   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5829   duplicate portions of the RTL code.  Call identify_blocks before
5830   changing the RTL, and call reorder_blocks after.  */
5831
/* Put all this function's BLOCK nodes, including those that are chained
   onto the first block, into a vector, and return it.
5834   Also store in each NOTE for the beginning or end of a block
5835   the index of that block in the vector.
5836   The arguments are BLOCK, the chain of top-level blocks of the function,
5837   and INSNS, the insn chain of the function.  */
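/* For example, given the block tree T { A { B } C }, the returned
   vector is [T, A, B, C]; assuming the notes appear in that nesting
   order, the BLOCK_BEG notes for A, B and C are numbered 1, 2 and 3,
   matching their vector indices (the outermost block, index 0, gets no
   note of its own).  */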
5838
5839tree *
5840identify_blocks (block, insns)
5841     tree block;
5842     rtx insns;
5843{
5844  int n_blocks;
5845  tree *block_vector;
5846  int *block_stack;
5847  int depth = 0;
5848  int next_block_number = 1;
5849  int current_block_number = 1;
5850  rtx insn;
5851
5852  if (block == 0)
5853    return 0;
5854
5855  n_blocks = all_blocks (block, 0);
5856  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5857  block_stack = (int *) alloca (n_blocks * sizeof (int));
5858
5859  all_blocks (block, block_vector);
5860
5861  for (insn = insns; insn; insn = NEXT_INSN (insn))
5862    if (GET_CODE (insn) == NOTE)
5863      {
5864	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5865	  {
5866	    block_stack[depth++] = current_block_number;
5867	    current_block_number = next_block_number;
5868	    NOTE_BLOCK_NUMBER (insn) =  next_block_number++;
5869	  }
5870	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5871	  {
5872	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
5873	    current_block_number = block_stack[--depth];
5874	  }
5875      }
5876
5877  if (n_blocks != next_block_number)
5878    abort ();
5879
5880  return block_vector;
5881}
5882
5883/* Given BLOCK_VECTOR which was returned by identify_blocks,
5884   and a revised instruction chain, rebuild the tree structure
5885   of BLOCK nodes to correspond to the new order of RTL.
   The new block tree is inserted below BLOCK.
5887   Returns the current top-level block.  */
5888
5889tree
5890reorder_blocks (block_vector, block, insns)
5891     tree *block_vector;
5892     tree block;
5893     rtx insns;
5894{
5895  tree current_block = block;
5896  rtx insn;
5897
5898  if (block_vector == 0)
5899    return block;
5900
  /* Prune the old trees away, so that they don't get in the way.  */
5902  BLOCK_SUBBLOCKS (current_block) = 0;
5903  BLOCK_CHAIN (current_block) = 0;
5904
5905  for (insn = insns; insn; insn = NEXT_INSN (insn))
5906    if (GET_CODE (insn) == NOTE)
5907      {
5908	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5909	  {
5910	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5911	    /* If we have seen this block before, copy it.  */
5912	    if (TREE_ASM_WRITTEN (block))
5913	      block = copy_node (block);
5914	    BLOCK_SUBBLOCKS (block) = 0;
5915	    TREE_ASM_WRITTEN (block) = 1;
5916	    BLOCK_SUPERCONTEXT (block) = current_block;
5917	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5918	    BLOCK_SUBBLOCKS (current_block) = block;
5919	    current_block = block;
5920	    NOTE_SOURCE_FILE (insn) = 0;
5921	  }
5922	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5923	  {
5924	    BLOCK_SUBBLOCKS (current_block)
5925	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5926	    current_block = BLOCK_SUPERCONTEXT (current_block);
5927	    NOTE_SOURCE_FILE (insn) = 0;
5928	  }
5929      }
5930
5931  BLOCK_SUBBLOCKS (current_block)
5932    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5933  return current_block;
5934}
5935
5936/* Reverse the order of elements in the chain T of blocks,
5937   and return the new head of the chain (old last element).  */
5938
5939static tree
5940blocks_nreverse (t)
5941     tree t;
5942{
5943  register tree prev = 0, decl, next;
5944  for (decl = t; decl; decl = next)
5945    {
5946      next = BLOCK_CHAIN (decl);
5947      BLOCK_CHAIN (decl) = prev;
5948      prev = decl;
5949    }
5950  return prev;
5951}
5952
/* Count the BLOCK nodes in the chain starting with BLOCK, together with
   all their subblocks, and store them into the vector VECTOR.  Also clear
5955   blocks.  */
5956
5957static int
5958all_blocks (block, vector)
5959     tree block;
5960     tree *vector;
5961{
5962  int n_blocks = 0;
5963
5964  while (block)
5965    {
5966      TREE_ASM_WRITTEN (block) = 0;
5967
5968      /* Record this block.  */
5969      if (vector)
5970	vector[n_blocks] = block;
5971
5972      ++n_blocks;
5973
5974      /* Record the subblocks, and their subblocks...  */
5975      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5976			      vector ? vector + n_blocks : 0);
5977      block = BLOCK_CHAIN (block);
5978    }
5979
5980  return n_blocks;
5981}
5982
5983/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5984   and initialize static variables for generating RTL for the statements
5985   of the function.  */
5986
5987void
5988init_function_start (subr, filename, line)
5989     tree subr;
5990     char *filename;
5991     int line;
5992{
5993  init_stmt_for_function ();
5994
5995  cse_not_expected = ! optimize;
5996
5997  /* Caller save not needed yet.  */
5998  caller_save_needed = 0;
5999
6000  /* No stack slots have been made yet.  */
6001  stack_slot_list = 0;
6002
6003  /* There is no stack slot for handling nonlocal gotos.  */
6004  nonlocal_goto_handler_slots = 0;
6005  nonlocal_goto_stack_level = 0;
6006
6007  /* No labels have been declared for nonlocal use.  */
6008  nonlocal_labels = 0;
6009  nonlocal_goto_handler_labels = 0;
6010
6011  /* No function calls so far in this function.  */
6012  function_call_count = 0;
6013
6014  /* No parm regs have been allocated.
6015     (This is important for output_inline_function.)  */
6016  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6017
6018  /* Initialize the RTL mechanism.  */
6019  init_emit ();
6020
6021  /* Initialize the queue of pending postincrement and postdecrements,
6022     and some other info in expr.c.  */
6023  init_expr ();
6024
6025  /* We haven't done register allocation yet.  */
6026  reg_renumber = 0;
6027
6028  init_const_rtx_hash_table ();
6029
6030  current_function_name = (*decl_printable_name) (subr, 2);
6031
6032  /* Nonzero if this is a nested function that uses a static chain.  */
6033
6034  current_function_needs_context
6035    = (decl_function_context (current_function_decl) != 0
6036       && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6037
6038  /* Set if a call to setjmp is seen.  */
6039  current_function_calls_setjmp = 0;
6040
6041  /* Set if a call to longjmp is seen.  */
6042  current_function_calls_longjmp = 0;
6043
6044  current_function_calls_alloca = 0;
6045  current_function_has_nonlocal_label = 0;
6046  current_function_has_nonlocal_goto = 0;
6047  current_function_contains_functions = 0;
6048  current_function_is_leaf = 0;
6049  current_function_sp_is_unchanging = 0;
6050  current_function_uses_only_leaf_regs = 0;
6051  current_function_has_computed_jump = 0;
6052  current_function_is_thunk = 0;
6053
6054  current_function_returns_pcc_struct = 0;
6055  current_function_returns_struct = 0;
6056  current_function_epilogue_delay_list = 0;
6057  current_function_uses_const_pool = 0;
6058  current_function_uses_pic_offset_table = 0;
6059  current_function_cannot_inline = 0;
6060
6061  /* We have not yet needed to make a label to jump to for tail-recursion.  */
6062  tail_recursion_label = 0;
6063
6064  /* We haven't had a need to make a save area for ap yet.  */
6065
6066  arg_pointer_save_area = 0;
6067
6068  /* No stack slots allocated yet.  */
6069  frame_offset = 0;
6070
6071  /* No SAVE_EXPRs in this function yet.  */
6072  save_expr_regs = 0;
6073
6074  /* No RTL_EXPRs in this function yet.  */
6075  rtl_expr_chain = 0;
6076
6077  /* Set up to allocate temporaries.  */
6078  init_temp_slots ();
6079
  /* Within the function body, compute a type's size as soon as it is laid out.  */
6081  immediate_size_expand++;
6082
6083  /* We haven't made any trampolines for this function yet.  */
6084  trampoline_list = 0;
6085
6086  init_pending_stack_adjust ();
6087  inhibit_defer_pop = 0;
6088
6089  current_function_outgoing_args_size = 0;
6090
6091  /* Prevent ever trying to delete the first instruction of a function.
6092     Also tell final how to output a linenum before the function prologue.
6093     Note linenums could be missing, e.g. when compiling a Java .class file. */
6094  if (line > 0)
6095    emit_line_note (filename, line);
6096
6097  /* Make sure first insn is a note even if we don't want linenums.
6098     This makes sure the first insn will never be deleted.
6099     Also, final expects a note to appear there.  */
6100  emit_note (NULL_PTR, NOTE_INSN_DELETED);
6101
6102  /* Set flags used by final.c.  */
6103  if (aggregate_value_p (DECL_RESULT (subr)))
6104    {
6105#ifdef PCC_STATIC_STRUCT_RETURN
6106      current_function_returns_pcc_struct = 1;
6107#endif
6108      current_function_returns_struct = 1;
6109    }
6110
  /* Warn if this function returns an aggregate type,
6112     regardless of which calling convention we are using for it.  */
6113  if (warn_aggregate_return
6114      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6115    warning ("function returns an aggregate");
6116
6117  current_function_returns_pointer
6118    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6119
6120  /* Indicate that we need to distinguish between the return value of the
6121     present function and the return value of a function being called.  */
6122  rtx_equal_function_value_matters = 1;
6123
6124  /* Indicate that we have not instantiated virtual registers yet.  */
6125  virtuals_instantiated = 0;
6126
6127  /* Indicate we have no need of a frame pointer yet.  */
6128  frame_pointer_needed = 0;
6129
6130  /* By default assume not varargs or stdarg.  */
6131  current_function_varargs = 0;
6132  current_function_stdarg = 0;
6133}
6134
6135/* Indicate that the current function uses extra args
6136   not explicitly mentioned in the argument list in any fashion.  */
6137
6138void
6139mark_varargs ()
6140{
6141  current_function_varargs = 1;
6142}
6143
6144/* Expand a call to __main at the beginning of a possible main function.  */
6145
6146#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6147#undef HAS_INIT_SECTION
6148#define HAS_INIT_SECTION
6149#endif
6150
6151void
6152expand_main_function ()
6153{
6154#if !defined (HAS_INIT_SECTION)
6155  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6156		     VOIDmode, 0);
6157#endif /* not HAS_INIT_SECTION */
6158}
6159
6160extern struct obstack permanent_obstack;
6161
6162/* Start the RTL for a new function, and set variables used for
6163   emitting RTL.
6164   SUBR is the FUNCTION_DECL node.
6165   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6166   the function's parameters, which must be run at any return statement.  */
6167
6168void
6169expand_function_start (subr, parms_have_cleanups)
6170     tree subr;
6171     int parms_have_cleanups;
6172{
6173  register int i;
6174  tree tem;
6175  rtx last_ptr = NULL_RTX;
6176
6177  /* Make sure volatile mem refs aren't considered
6178     valid operands of arithmetic insns.  */
6179  init_recog_no_volatile ();
6180
6181  /* Set this before generating any memory accesses.  */
6182  current_function_check_memory_usage
6183    = (flag_check_memory_usage
6184       && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6185
6186  current_function_instrument_entry_exit
6187    = (flag_instrument_function_entry_exit
6188       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6189
6190  /* If function gets a static chain arg, store it in the stack frame.
6191     Do this first, so it gets the first stack slot offset.  */
6192  if (current_function_needs_context)
6193    {
6194      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6195
6196      /* Delay copying static chain if it is not a register to avoid
6197	 conflicts with regs used for parameters.  */
6198      if (! SMALL_REGISTER_CLASSES
6199	  || GET_CODE (static_chain_incoming_rtx) == REG)
6200        emit_move_insn (last_ptr, static_chain_incoming_rtx);
6201    }
6202
6203  /* If the parameters of this function need cleaning up, get a label
6204     for the beginning of the code which executes those cleanups.  This must
6205     be done before doing anything with return_label.  */
6206  if (parms_have_cleanups)
6207    cleanup_label = gen_label_rtx ();
6208  else
6209    cleanup_label = 0;
6210
6211  /* Make the label for return statements to jump to, if this machine
6212     does not have a one-instruction return and uses an epilogue,
6213     or if it returns a structure, or if it has parm cleanups.  */
6214#ifdef HAVE_return
6215  if (cleanup_label == 0 && HAVE_return
6216      && ! current_function_instrument_entry_exit
6217      && ! current_function_returns_pcc_struct
6218      && ! (current_function_returns_struct && ! optimize))
6219    return_label = 0;
6220  else
6221    return_label = gen_label_rtx ();
6222#else
6223  return_label = gen_label_rtx ();
6224#endif
6225
6226  /* Initialize rtx used to return the value.  */
6227  /* Do this before assign_parms so that we copy the struct value address
6228     before any library calls that assign parms might generate.  */
6229
6230  /* Decide whether to return the value in memory or in a register.  */
6231  if (aggregate_value_p (DECL_RESULT (subr)))
6232    {
6233      /* Returning something that won't go in a register.  */
6234      register rtx value_address = 0;
6235
6236#ifdef PCC_STATIC_STRUCT_RETURN
6237      if (current_function_returns_pcc_struct)
6238	{
6239	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6240	  value_address = assemble_static_space (size);
6241	}
6242      else
6243#endif
6244	{
6245	  /* Expect to be passed the address of a place to store the value.
6246	     If it is passed as an argument, assign_parms will take care of
6247	     it.  */
6248	  if (struct_value_incoming_rtx)
6249	    {
6250	      value_address = gen_reg_rtx (Pmode);
6251	      emit_move_insn (value_address, struct_value_incoming_rtx);
6252	    }
6253	}
6254      if (value_address)
6255	{
6256	  DECL_RTL (DECL_RESULT (subr))
6257	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6258	  MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
6259			       AGGREGATE_TYPE_P (TREE_TYPE
6260						 (DECL_RESULT
6261						  (subr))));
6262	}
6263    }
6264  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6265    /* If return mode is void, this decl rtl should not be used.  */
6266    DECL_RTL (DECL_RESULT (subr)) = 0;
6267  else if (parms_have_cleanups || current_function_instrument_entry_exit)
6268    {
      /* If the function will end with cleanup code for parms,
	 compute the return value into a pseudo reg,
6271	 which we will copy into the true return register
6272	 after the cleanups are done.  */
6273
6274      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6275
6276#ifdef PROMOTE_FUNCTION_RETURN
6277      tree type = TREE_TYPE (DECL_RESULT (subr));
6278      int unsignedp = TREE_UNSIGNED (type);
6279
6280      mode = promote_mode (type, mode, &unsignedp, 1);
6281#endif
6282
6283      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6284    }
6285  else
6286    /* Scalar, returned in a register.  */
6287    {
6288#ifdef FUNCTION_OUTGOING_VALUE
6289      DECL_RTL (DECL_RESULT (subr))
6290	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6291#else
6292      DECL_RTL (DECL_RESULT (subr))
6293	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6294#endif
6295
6296      /* Mark this reg as the function's return value.  */
6297      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6298	{
6299	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6300	  /* Needed because we may need to move this to memory
6301	     in case it's a named return value whose address is taken.  */
6302	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
6303	}
6304    }
6305
6306  /* Initialize rtx for parameters and local variables.
6307     In some cases this requires emitting insns.  */
6308
6309  assign_parms (subr, 0);
6310
6311  /* Copy the static chain now if it wasn't a register.  The delay is to
6312     avoid conflicts with the parameter passing registers.  */
6313
6314  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6315      if (GET_CODE (static_chain_incoming_rtx) != REG)
6316        emit_move_insn (last_ptr, static_chain_incoming_rtx);
6317
6318  /* The following was moved from init_function_start.
6319     The move is supposed to make sdb output more accurate.  */
6320  /* Indicate the beginning of the function body,
6321     as opposed to parm setup.  */
6322  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6323
6324  /* If doing stupid allocation, mark parms as born here.  */
6325
6326  if (GET_CODE (get_last_insn ()) != NOTE)
6327    emit_note (NULL_PTR, NOTE_INSN_DELETED);
6328  parm_birth_insn = get_last_insn ();
6329
6330  if (obey_regdecls)
6331    {
6332      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6333	use_variable (regno_reg_rtx[i]);
6334
6335      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6336	use_variable (current_function_internal_arg_pointer);
6337    }
6338
6339  context_display = 0;
6340  if (current_function_needs_context)
6341    {
6342      /* Fetch static chain values for containing functions.  */
6343      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
6345	 pointer into a pseudo.  If we have small register classes, copy
6346	 the value from memory if static_chain_incoming_rtx is a REG.  If
6347	 we do stupid register allocation, we use the stack address
6348	 generated above.  */
6349      if (tem && ! obey_regdecls)
6350	{
6351	  /* If the static chain originally came in a register, put it back
6352	     there, then move it out in the next insn.  The reason for
6353	     this peculiar code is to satisfy function integration.  */
6354	  if (SMALL_REGISTER_CLASSES
6355	      && GET_CODE (static_chain_incoming_rtx) == REG)
6356	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
6357	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
6358	}
6359
6360      while (tem)
6361	{
6362	  tree rtlexp = make_node (RTL_EXPR);
6363
6364	  RTL_EXPR_RTL (rtlexp) = last_ptr;
6365	  context_display = tree_cons (tem, rtlexp, context_display);
6366	  tem = decl_function_context (tem);
6367	  if (tem == 0)
6368	    break;
6369	  /* Chain thru stack frames, assuming pointer to next lexical frame
6370	     is found at the place we always store it.  */
6371#ifdef FRAME_GROWS_DOWNWARD
6372	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6373#endif
6374	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6375					       memory_address (Pmode, last_ptr)));
6376
6377	  /* If we are not optimizing, ensure that we know that this
6378	     piece of context is live over the entire function.  */
6379	  if (! optimize)
6380	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6381						save_expr_regs);
6382	}
6383    }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    {
      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
		   EXPAND_MEMORY_USE_BAD);
      /* Flush the queue in case this parameter declaration has
	 side-effects.  */
      emit_queue ();
    }
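
  /* For illustration (hypothetical declaration, not from this file): a
     parameter list involving a variable-size type, such as

	 void f (int n, int a[n][n]);

     queues the size expressions that mention `n' while the declaration
     is parsed; the loop above emits the insns that evaluate them here,
     at function entry, once `n' itself has been set up.  */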

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}

/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      /* arg_pointer_save_area may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      rtx seq;
      start_sequence ();
      emit_move_insn (validize_mem (arg_pointer_save_area),
		      virtual_incoming_args_rtx);
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_before (seq, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
      rtx blktramp;
#endif
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
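
  /* For illustration (hypothetical source, not from this file): a
     trampoline is required when the address of a nested function
     escapes, as in

	 void walk (void (*cb) (int));

	 void g (int base)
	 {
	   void visit (int i) { consume (base + i); }
	   walk (visit);
	 }

     `visit' needs `g's frame, but `walk' expects a bare function
     pointer.  The loop above fills in a small stack-resident stub
     (copied from the target's template where one exists, then patched
     by INITIALIZE_TRAMPOLINE) that loads the static chain and jumps to
     `visit'; the stub's address is what gets passed to `walk'.  */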

  /* If we are doing stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    start_sequence ();
	    probe_stack_range (STACK_CHECK_PROTECT,
			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	    seq = get_insns ();
	    end_sequence ();
	    emit_insns_before (seq, tail_recursion_reentry);
	    break;
	  }
    }
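
  /* A note on the intent (a sketch, not a guarantee for every target):
     emitting one probe at entry, covering STACK_CHECK_MAX_FRAME_SIZE
     bytes beyond the STACK_CHECK_PROTECT guard area, means that a stack
     overflow caused by a callee whose frame fits in that bound is
     detected here, at a point where it can be reported cleanly, rather
     than somewhere inside the callee.  Functions that make no calls
     skip the probe entirely.  */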

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slots != 0
      && ! current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Must mark the last line number note in the function, so that the test
     coverage code can avoid counting the last line twice.  This just tells
     the code to ignore the immediately following line note, since there
     already exists a copy of this note somewhere above.  This line number
     note is still needed for debugging though, so we can't delete it.  */
  if (flag_test_coverage)
    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* Now handle any leftover exception regions that may have been
     created for the parameters.  */
  {
    rtx last = get_last_insn ();
    rtx label;

    expand_leftover_cleanups ();

    /* If the above emitted any code, make sure we jump around it.  */
    if (last != get_last_insn ())
      {
	label = gen_label_rtx ();
	last = emit_jump_insn_after (gen_jump (label), last);
	last = emit_barrier_after (last);
	emit_label (label);
      }
  }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
			 fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If the scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      /* If this is a BLKmode structure being returned in registers, then use
	 the mode computed in expand_return.  */
      if (GET_MODE (real_decl_result) == BLKmode)
	PUT_MODE (real_decl_result,
		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));

      /* The delay slot scheduler assumes that current_function_return_rtx
	 holds the hard register containing the return value, not a temporary
	 pseudo.  */
      current_function_return_rtx = real_decl_result;
    }
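
  /* Illustrative sketch (register numbers and target invented): for

	 int f (void) { int t = compute (); return t; }

     `t' typically ends up in a pseudo such as (reg:SI 105), while
     FUNCTION_VALUE designates a hard register, say (reg:SI 0).  The
     code above then emits

	 (set (reg:SI 0) (reg:SI 105))
	 (use (reg:SI 0))

     where the USE keeps the hard return register live up to the
     epilogue so later passes cannot delete the copy.  */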

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* If this is an implementation of __throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
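
/* For example (UIDs invented), recording a SEQUENCE of three insns with
   INSN_UIDs 42, 43 and 44 yields the vector { 42, 43, 44, 0 }.  The
   trailing zero is the sentinel the scans below rely on; it cannot
   collide with a real insn, since UIDs are assigned starting at 1.  */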

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
#endif /* HAVE_prologue || HAVE_epilogue */

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f ATTRIBUTE_UNUSED;
{
  int inserted = 0;
#ifdef HAVE_prologue
  rtx prologue_end = NULL_RTX;
#endif

  prologue = 0;
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx seq;

      start_sequence ();
      seq = gen_prologue ();
      emit_insn (seq);

      /* Retain a map of the prologue insns.  */
      if (GET_CODE (seq) != SEQUENCE)
	seq = get_insns ();
      prologue = record_insns (seq);

      prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
      seq = gen_sequence ();
      end_sequence ();

      /* If optimization is off, the entry block may have no successors
	 (for instance, in an empty function).  */
      if (ENTRY_BLOCK_PTR->succ)
	{
	  /* Can't deal with multiple successors of the entry block.  */
	  if (ENTRY_BLOCK_PTR->succ->succ_next)
	    abort ();

	  insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
	  inserted = 1;
	}
      else
	emit_insn_after (seq, f);
    }
#endif
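
  /* A note on the design choice above: attaching the prologue sequence
     to the single successor edge of ENTRY_BLOCK_PTR, rather than
     emitting it directly after F, lets commit_edge_insertions (called
     near the end of this function) splice the insns in while keeping
     the basic block boundaries consistent; emit_insn_after is only the
     fallback for the unoptimized case where no successor edge exists.  */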

  epilogue = 0;
#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      edge e;
      basic_block bb = 0;
      rtx tail = get_last_insn ();

      /* ??? This is ghastly.  If function returns were not done via uses,
	 but via mark_regs_live_at_end, we could use insert_insn_on_edge
	 and all of this ugliness would go away.  */

      switch (optimize)
	{
	default:
	  /* If the exit block has no non-fake predecessors, we don't
	     need an epilogue.  Furthermore, only pay attention to the
	     fallthru predecessors; if (conditional) return insns were
	     generated, by definition we do not need to emit epilogue
	     insns.  */

	  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
	    if ((e->flags & EDGE_FAKE) == 0
		&& (e->flags & EDGE_FALLTHRU) != 0)
	      break;
	  if (e == NULL)
	    break;

	  /* We can't handle multiple epilogues -- if one is needed,
	     we won't be able to place it multiple times.

	     ??? Fix epilogue expanders to not assume they are the
	     last thing done compiling the function.  Either that
	     or copy_rtx each insn.

	     ??? Blah, it's not a simple expression to assert that
	     we have exactly one fallthru exit edge.  */

	  bb = e->src;
	  tail = bb->end;

	  /* ??? If the last insn of the basic block is a jump, then we
	     are creating a new basic block.  Wimp out and leave these
	     insns outside any block.  */
	  if (GET_CODE (tail) == JUMP_INSN)
	    bb = 0;

	  /* FALLTHRU */
	case 0:
	  {
	    rtx prev, seq, first_use;

	    /* Move the USE insns at the end of a function onto a list.  */
	    prev = tail;
	    if (GET_CODE (prev) == BARRIER
		|| GET_CODE (prev) == NOTE)
	      prev = prev_nonnote_insn (prev);

	    first_use = 0;
	    if (prev
		&& GET_CODE (prev) == INSN
		&& GET_CODE (PATTERN (prev)) == USE)
	      {
		/* If the end of the block is the use, grab hold of something
		   else so that we emit barriers etc. in the right place.  */
		if (prev == tail)
		  {
		    do
		      tail = PREV_INSN (tail);
		    while (GET_CODE (tail) == INSN
			   && GET_CODE (PATTERN (tail)) == USE);
		  }

		do
		  {
		    rtx use = prev;
		    prev = prev_nonnote_insn (prev);

		    remove_insn (use);
		    if (first_use)
		      {
			NEXT_INSN (use) = first_use;
			PREV_INSN (first_use) = use;
		      }
		    else
		      NEXT_INSN (use) = NULL_RTX;
		    first_use = use;
		  }
		while (prev
		       && GET_CODE (prev) == INSN
		       && GET_CODE (PATTERN (prev)) == USE);
	      }
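
	    /* To illustrate (registers invented): a function tail like

		   ... body ...
		   (use (reg:SI 0))
		   (use (reg:SI 1))

	       has its trailing USE insns unlinked onto the FIRST_USE
	       list, so that once the epilogue is added below the
	       stream reads

		   ... body ...
		   NOTE_INSN_EPILOGUE_BEG
		   ... epilogue insns ...
		   (use (reg:SI 0))
		   (use (reg:SI 1))
		   (return jump)
		   BARRIER

	       with the USEs reinserted just before the return, so the
	       return-value registers are still considered live there.  */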

	    /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	       epilogue insns, the USE insns at the end of a function,
	       the jump insn that returns, and then a BARRIER.  */

	    if (GET_CODE (tail) != BARRIER)
	      {
		prev = next_nonnote_insn (tail);
		if (!prev || GET_CODE (prev) != BARRIER)
		  emit_barrier_after (tail);
	      }

	    seq = gen_epilogue ();
	    prev = tail;
	    tail = emit_jump_insn_after (seq, tail);

	    /* Insert the USE insns immediately before the return insn, which
	       must be the last instruction emitted in the sequence.  */
	    if (first_use)
	      emit_insns_before (first_use, tail);
	    emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);

	    /* Update the tail of the basic block.  */
	    if (bb)
	      bb->end = tail;

	    /* Retain a map of the epilogue insns.  */
	    epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  }
	}
    }
#endif

  if (inserted)
    commit_edge_insertions ();

#ifdef HAVE_prologue
  if (prologue_end)
    {
      rtx insn, prev;

      /* GDB handles `break f' by setting a breakpoint on the first
	 line note *after* the prologue.  This means (1) that if
	 there are line number notes before where we inserted the
	 prologue, we should move them after it, and (2) that if there
	 is no such note, we should generate one at the end of the
	 prologue.  */

      for (insn = prologue_end; insn; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	    {
	      /* Note that we cannot reorder the first insn in the
		 chain, since rest_of_compilation relies on that
		 remaining constant.  Do the next best thing.  */
	      if (prev == NULL)
		{
		  emit_line_note_after (NOTE_SOURCE_FILE (insn),
					NOTE_LINE_NUMBER (insn),
					prologue_end);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		}
	      else
		reorder_insns (insn, insn, prologue_end);
	    }
	}
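
      /* For example (line numbers invented): if the insn chain began as

	     NOTE line 10
	     prologue insns
	     NOTE_INSN_PROLOGUE_END

	 the loop above turns it into

	     prologue insns
	     NOTE_INSN_PROLOGUE_END
	     NOTE line 10

	 so the breakpoint GDB sets for `break f' lands after the frame
	 setup.  */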

      insn = NEXT_INSN (prologue_end);
      if (! insn || GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) <= 0)
	{
	  for (insn = next_active_insn (f); insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
		{
		  emit_line_note_after (NOTE_SOURCE_FILE (insn),
					NOTE_LINE_NUMBER (insn),
					prologue_end);
		  break;
		}
	    }
	}
    }
#endif
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  rtx next;
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = NEXT_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }

		  next = NEXT_INSN (note);

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (BLOCK_HEAD (0) == note)
		    BLOCK_HEAD (0) = next;

		  remove_insn (note);
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; (note = PREV_INSN (note));)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }

		  /* Whether or not we can depend on BLOCK_HEAD,
		     attempt to keep it up-to-date.  */
		  if (n_basic_blocks
		      && BLOCK_HEAD (n_basic_blocks - 1) == insn)
		    BLOCK_HEAD (n_basic_blocks - 1) = note;

		  remove_insn (note);
		  add_insn_before (note, insn);
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
