/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

/* $FreeBSD: head/contrib/gcc/function.c 52272 1999-10-15 21:49:40Z obrien $ */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
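
/* For instance (an illustrative override, not taken from this file), a
   target whose startup code spells the symbol differently might define

     #define NAME__MAIN "__gccmain"
     #define SYMBOL__MAIN __gccmain

   in its configuration header.  */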

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
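
/* Worked example (illustrative): with ALIGN == 8,
   FLOOR_ROUND (37, 8) == 32 and CEIL_ROUND (37, 8) == 40.  */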

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

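/* Illustrative example (not from the original sources): in GNU C,

     struct big b = ({ make_big (); });

   the value returned by make_big () is placed in a stack temporary.
   Since that temporary holds the result of the ({...}) grouping, it is
   preserved by pretending it was allocated at the enclosing nesting
   level instead of being freed with the inner statement.  */
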
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
					    int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack	PROTO((struct function *, rtx, tree,
				       enum machine_mode, enum machine_mode,
				       int, int, int));
static void fixup_var_refs	PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement	PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int));
static void fixup_var_refs_1	PROTO((rtx, enum machine_mode, rtx *, rtx,
				       struct fixup_replacement **));
static rtx fixup_memory_subreg	PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1	PROTO((rtx, rtx));
static void optimize_bit_field	PROTO((rtx, rtx, rtx *));
static void instantiate_decls	PROTO((tree, int));
static void instantiate_decls_1	PROTO((tree, int));
static void instantiate_decl	PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers	PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below		PROTO((struct args_size *, enum machine_mode,
				       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down		PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse	PROTO((tree));
static int all_blocks		PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns	PROTO((rtx));
static int contains		PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1	PROTO((rtx *, rtx, int));

/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}

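/* Sketch of the intended calling sequence (illustrative, not code from
   this file): a language front end about to compile a nested function
   does

     push_function_context ();
     ... generate RTL for the nested function ...
     pop_function_context ();

   so that all of the per-function state above is saved and restored.  */
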
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif
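
  /* E.g. (illustrative, assuming two's complement): FLOOR_ROUND (-5, 4)
     computes -5 & ~3 == -8, rounding toward minus infinity, whereas the
     division-based (-5 / 4) * 4 would typically yield -4 under C's
     truncating division.  */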

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}

/* Assign a stack slot in a containing function.
   The first three arguments are the same as in the preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
			function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
	  && (best_p == 0 || best_p->size > p->size))
	best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
	 use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}

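/* A minimal usage sketch (illustrative, not from this file): scratch
   stack space for a 16-byte BLKmode value that dies at the end of the
   current statement could be obtained with

     rtx slot = assign_stack_temp (BLKmode, 16, 0);

   and would be released again by a later call to free_temp_slots ().  */
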
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}

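/* For example (illustrative): two free BLKmode slots whose
   (base_offset, full_size) pairs are (0, 16) and (16, 16) abut on the
   stack, so the first absorbs the second, leaving a single free slot
   with base_offset 0 and full_size 32.  */
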
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;

      else if (XEXP (p->slot, 0) == x
	       || p->address == x
	       || (GET_CODE (x) == PLUS
		   && XEXP (x, 0) == virtual_stack_vars_rtx
		   && GET_CODE (XEXP (x, 1)) == CONST_INT
		   && INTVAL (XEXP (x, 1)) >= p->base_offset
		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

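/* Typical pattern (illustrative, not code from this file): around the
   expansion of one statement a caller might do

     push_temp_slots ();
     ... expand the statement, allocating temporaries ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   so that temporaries die with the statement unless preserved.  */
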
/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}

/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
	 to put things in the stack for the sake of setjmp, try to keep it
	 in a register until we know we actually need the address.  */
      if (can_use_addressof)
	gen_mem_addressof (reg, decl);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl),
			    promoted_mode, decl_mode,
			    TREE_SIDE_EFFECTS (decl), 0,
			    TREE_USED (decl)
			    || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl), 0,
			  TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
		       XEXP (reg, 0), ptr_mode,
		       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
		       TYPE_MODE (sizetype),
		       GEN_INT (MEMORY_USE_RW),
		       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
		    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
	new = function->parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (regno < max_parm_reg)
	new = parm_reg_stack_loc[regno];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}

static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
	  end_sequence ();
	}
    }
}

1612/* REPLACEMENTS is a pointer to the head of a list of struct
1613   fixup_replacement entries and X is some part of an insn.  Return the entry
1614   whose OLD value is equal to X; allocate a new structure if none exists.  */
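
/* For instance (illustrative): after an insn that mentions VAR twice has
   been processed, the list might hold a single entry mapping VAR's MEM to
   one new pseudo, so that a MATCH_DUP between the two operands still sees
   identical rtl.  */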
1615
1616static struct fixup_replacement *
1617find_fixup_replacement (replacements, x)
1618     struct fixup_replacement **replacements;
1619     rtx x;
1620{
1621  struct fixup_replacement *p;
1622
1623  /* See if we have already replaced this.  */
1624  for (p = *replacements; p && p->old != x; p = p->next)
1625    ;
1626
1627  if (p == 0)
1628    {
1629      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1630      p->old = x;
1631      p->new = 0;
1632      p->next = *replacements;
1633      *replacements = p;
1634    }
1635
1636  return p;
1637}
1638
1639/* Scan the insn-chain starting with INSN for refs to VAR
1640   and fix them up.  TOPLEVEL is nonzero if this chain is the
1641   main chain of insns for the current function.  */
1642
1643static void
1644fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1645     rtx var;
1646     enum machine_mode promoted_mode;
1647     int unsignedp;
1648     rtx insn;
1649     int toplevel;
1650{
1651  rtx call_dest = 0;
1652
1653  while (insn)
1654    {
1655      rtx next = NEXT_INSN (insn);
1656      rtx set, prev, prev_set;
1657      rtx note;
1658
1659      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1660	{
1661	  /* If this is a CLOBBER of VAR, delete it.
1662
1663	     If it has a REG_LIBCALL note, delete the REG_LIBCALL
1664	     and REG_RETVAL notes too.  */
1665 	  if (GET_CODE (PATTERN (insn)) == CLOBBER
1666	      && (XEXP (PATTERN (insn), 0) == var
1667		  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1668		      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1669			  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1670	    {
1671	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1672		/* The REG_LIBCALL note will go away since we are going to
1673		   turn INSN into a NOTE, so just delete the
1674		   corresponding REG_RETVAL note.  */
1675		remove_note (XEXP (note, 0),
1676			     find_reg_note (XEXP (note, 0), REG_RETVAL,
1677					    NULL_RTX));
1678
1679	      /* In unoptimized compilation, we shouldn't call delete_insn
1680		 except in jump.c when emitting warnings.  */
1681	      PUT_CODE (insn, NOTE);
1682	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1683	      NOTE_SOURCE_FILE (insn) = 0;
1684	    }
1685
1686	  /* The insn to load VAR from a home in the arglist
1687	     is now a no-op.  When we see it, just delete it.
1688	     Similarly if this is storing VAR from a register from which
1689	     it was loaded in the previous insn.  This will occur
1690	     when an ADDRESSOF was made for an arglist slot.  */
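	  /* Illustrative example: once VAR's stack slot is its arglist home,
	     (set VAR VAR) can be deleted, as can (set VAR (reg:SI 66)) when
	     the previous insn was (set (reg:SI 66) VAR).  */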
1691	  else if (toplevel
1692		   && (set = single_set (insn)) != 0
1693		   && SET_DEST (set) == var
1694		   /* If this represents the result of an insn group,
1695		      don't delete the insn.  */
1696		   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1697		   && (rtx_equal_p (SET_SRC (set), var)
1698		       || (GET_CODE (SET_SRC (set)) == REG
1699			   && (prev = prev_nonnote_insn (insn)) != 0
1700			   && (prev_set = single_set (prev)) != 0
1701			   && SET_DEST (prev_set) == SET_SRC (set)
1702			   && rtx_equal_p (SET_SRC (prev_set), var))))
1703	    {
1704	      /* In unoptimized compilation, we shouldn't call delete_insn
1705		 except in jump.c when emitting warnings.  */
1706	      PUT_CODE (insn, NOTE);
1707	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1708	      NOTE_SOURCE_FILE (insn) = 0;
1709	      if (insn == last_parm_insn)
1710		last_parm_insn = PREV_INSN (next);
1711	    }
1712	  else
1713	    {
1714	      struct fixup_replacement *replacements = 0;
1715	      rtx next_insn = NEXT_INSN (insn);
1716
1717	      if (SMALL_REGISTER_CLASSES)
1718		{
1719		  /* If the insn that copies the results of a CALL_INSN
1720		     into a pseudo now references VAR, we have to use an
1721		     intermediate pseudo since we want the life of the
1722		     return value register to be only a single insn.
1723
1724		     If we don't use an intermediate pseudo, such things as
1725		     If we don't use an intermediate pseudo, then things such
1726		     as the address computations needed to make the address of
1727		     VAR valid could be placed between the CALL_INSN and INSN.
1728		     To make sure this doesn't happen, we record the destination
1729		     of the CALL_INSN and see if the next insn uses both that
1730		     and VAR.  */
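		  /* Illustrative sketch: given
		       (call_insn ... (set (reg:SI 0) (call ...)))
		       (insn (set VAR (reg:SI 0)))
		     with VAR now in memory, copying (reg:SI 0) into a fresh
		     pseudo first keeps any fixup insns we emit from
		     lengthening the hard register's life.  */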
1731
1732		  if (call_dest != 0 && GET_CODE (insn) == INSN
1733		      && reg_mentioned_p (var, PATTERN (insn))
1734		      && reg_mentioned_p (call_dest, PATTERN (insn)))
1735		    {
1736		      rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1737
1738		      emit_insn_before (gen_move_insn (temp, call_dest), insn);
1739
1740		      PATTERN (insn) = replace_rtx (PATTERN (insn),
1741						    call_dest, temp);
1742		    }
1743
1744		  if (GET_CODE (insn) == CALL_INSN
1745		      && GET_CODE (PATTERN (insn)) == SET)
1746		    call_dest = SET_DEST (PATTERN (insn));
1747		  else if (GET_CODE (insn) == CALL_INSN
1748			   && GET_CODE (PATTERN (insn)) == PARALLEL
1749			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1750		    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1751		  else
1752		    call_dest = 0;
1753		}
1754
1755	      /* See if we have to do anything to INSN now that VAR is in
1756		 memory.  If it needs to be loaded into a pseudo, use a single
1757		 pseudo for the entire insn in case there is a MATCH_DUP
1758		 between two operands.  We pass a pointer to the head of
1759		 a list of struct fixup_replacements.  If fixup_var_refs_1
1760		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1761		 it will record them in this list.
1762
1763		 If it allocated a pseudo for any replacement, we copy into
1764		 it here.  */
1765
1766	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1767				&replacements);
1768
1769	      /* If this is last_parm_insn, and any instructions were output
1770		 after it to fix it up, then we must set last_parm_insn to
1771		 the last such instruction emitted.  */
1772	      if (insn == last_parm_insn)
1773		last_parm_insn = PREV_INSN (next_insn);
1774
1775	      while (replacements)
1776		{
1777		  if (GET_CODE (replacements->new) == REG)
1778		    {
1779		      rtx insert_before;
1780		      rtx seq;
1781
1782		      /* OLD might be a (subreg (mem)).  */
1783		      if (GET_CODE (replacements->old) == SUBREG)
1784			replacements->old
1785			  = fixup_memory_subreg (replacements->old, insn, 0);
1786		      else
1787			replacements->old
1788			  = fixup_stack_1 (replacements->old, insn);
1789
1790		      insert_before = insn;
1791
1792		      /* If we are changing the mode, do a conversion.
1793			 This might be wasteful, but combine.c will
1794			 eliminate much of the waste.  */
1795
1796		      if (GET_MODE (replacements->new)
1797			  != GET_MODE (replacements->old))
1798			{
1799			  start_sequence ();
1800			  convert_move (replacements->new,
1801					replacements->old, unsignedp);
1802			  seq = gen_sequence ();
1803			  end_sequence ();
1804			}
1805		      else
1806			seq = gen_move_insn (replacements->new,
1807					     replacements->old);
1808
1809		      emit_insn_before (seq, insert_before);
1810		    }
1811
1812		  replacements = replacements->next;
1813		}
1814	    }
1815
1816	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1817	     But don't touch other insns referred to by reg-notes;
1818	     we will get them elsewhere.  */
1819	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1820	    if (GET_CODE (note) != INSN_LIST)
1821	      XEXP (note, 0)
1822		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1823	}
1824      insn = next;
1825    }
1826}
1827
1828/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1829   See if the rtx expression at *LOC in INSN needs to be changed.
1830
1831   REPLACEMENTS is a pointer to a list head that starts out zero, but may
1832   contain a list of original rtx's and replacements. If we find that we need
1833   to modify this insn by replacing a memory reference with a pseudo or by
1834   making a new MEM to implement a SUBREG, we consult that list to see if
1835   we have already chosen a replacement. If none has already been allocated,
1836   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
1837   or the SUBREG, as appropriate, to the pseudo.  */
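
/* Illustrative example, assuming a little-endian target: if VAR, formerly
   (reg:SI 65), is now (mem:SI (plus (reg fp) (const_int -12))), a use such
   as (subreg:HI (reg:SI 65) 0) can be rewritten in place as
   (mem:HI (plus (reg fp) (const_int -12))), while a paradoxical use gets a
   fresh pseudo recorded in REPLACEMENTS instead.  */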
1838
1839static void
1840fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1841     register rtx var;
1842     enum machine_mode promoted_mode;
1843     register rtx *loc;
1844     rtx insn;
1845     struct fixup_replacement **replacements;
1846{
1847  register int i;
1848  register rtx x = *loc;
1849  RTX_CODE code = GET_CODE (x);
1850  register char *fmt;
1851  register rtx tem, tem1;
1852  struct fixup_replacement *replacement;
1853
1854  switch (code)
1855    {
1856    case ADDRESSOF:
1857      if (XEXP (x, 0) == var)
1858	{
1859	  /* Prevent sharing of rtl that might lose.  */
1860	  rtx sub = copy_rtx (XEXP (var, 0));
1861
1862	  start_sequence ();
1863
1864	  if (! validate_change (insn, loc, sub, 0))
1865	    {
1866	      rtx y = force_operand (sub, NULL_RTX);
1867
1868	      if (! validate_change (insn, loc, y, 0))
1869		*loc = copy_to_reg (y);
1870	    }
1871
1872	  emit_insn_before (gen_sequence (), insn);
1873	  end_sequence ();
1874	}
1875      return;
1876
1877    case MEM:
1878      if (var == x)
1879	{
1880	  /* If we already have a replacement, use it.  Otherwise,
1881	     try to fix up this address in case it is invalid.  */
1882
1883	  replacement = find_fixup_replacement (replacements, var);
1884	  if (replacement->new)
1885	    {
1886	      *loc = replacement->new;
1887	      return;
1888	    }
1889
1890	  *loc = replacement->new = x = fixup_stack_1 (x, insn);
1891
1892	  /* Unless we are forcing memory to register or we changed the mode,
1893	     we can leave things the way they are if the insn is valid.  */
1894
1895	  INSN_CODE (insn) = -1;
1896	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
1897	      && recog_memoized (insn) >= 0)
1898	    return;
1899
1900	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
1901	  return;
1902	}
1903
1904      /* If X contains VAR, we need to unshare it here so that we update
1905	 each occurrence separately.  But all identical MEMs in one insn
1906	 must be replaced with the same rtx because of the possibility of
1907	 MATCH_DUPs.  */
1908
1909      if (reg_mentioned_p (var, x))
1910	{
1911	  replacement = find_fixup_replacement (replacements, x);
1912	  if (replacement->new == 0)
1913	    replacement->new = copy_most_rtx (x, var);
1914
1915	  *loc = x = replacement->new;
1916	}
1917      break;
1918
1919    case REG:
1920    case CC0:
1921    case PC:
1922    case CONST_INT:
1923    case CONST:
1924    case SYMBOL_REF:
1925    case LABEL_REF:
1926    case CONST_DOUBLE:
1927      return;
1928
1929    case SIGN_EXTRACT:
1930    case ZERO_EXTRACT:
1931      /* Note that in some cases those types of expressions are altered
1932	 by optimize_bit_field, and do not survive to get here.  */
1933      if (XEXP (x, 0) == var
1934	  || (GET_CODE (XEXP (x, 0)) == SUBREG
1935	      && SUBREG_REG (XEXP (x, 0)) == var))
1936	{
1937	  /* Get TEM as a valid MEM in the mode presently in the insn.
1938
1939	     We don't worry about the possibility of MATCH_DUP here; it
1940	     is highly unlikely and would be tricky to handle.  */
1941
1942	  tem = XEXP (x, 0);
1943	  if (GET_CODE (tem) == SUBREG)
1944	    {
1945	      if (GET_MODE_BITSIZE (GET_MODE (tem))
1946		  > GET_MODE_BITSIZE (GET_MODE (var)))
1947		{
1948		  replacement = find_fixup_replacement (replacements, var);
1949		  if (replacement->new == 0)
1950		    replacement->new = gen_reg_rtx (GET_MODE (var));
1951		  SUBREG_REG (tem) = replacement->new;
1952		}
1953	      else
1954		tem = fixup_memory_subreg (tem, insn, 0);
1955	    }
1956	  else
1957	    tem = fixup_stack_1 (tem, insn);
1958
1959	  /* Unless we want to load from memory, get TEM into the proper mode
1960	     for an extract from memory.  This can only be done if the
1961	     extract is at a constant position and length.  */
1962
1963	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1964	      && GET_CODE (XEXP (x, 2)) == CONST_INT
1965	      && ! mode_dependent_address_p (XEXP (tem, 0))
1966	      && ! MEM_VOLATILE_P (tem))
1967	    {
1968	      enum machine_mode wanted_mode = VOIDmode;
1969	      enum machine_mode is_mode = GET_MODE (tem);
1970	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1971
1972#ifdef HAVE_extzv
1973	      if (GET_CODE (x) == ZERO_EXTRACT)
1974		wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1975#endif
1976#ifdef HAVE_extv
1977	      if (GET_CODE (x) == SIGN_EXTRACT)
1978		wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1979#endif
1980	      /* If we have a narrower mode, we can do something.  */
1981	      if (wanted_mode != VOIDmode
1982		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1983		{
1984		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1985		  rtx old_pos = XEXP (x, 2);
1986		  rtx newmem;
1987
1988		  /* If the bytes and bits are counted differently, we
1989		     must adjust the offset.  */
1990		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1991		    offset = (GET_MODE_SIZE (is_mode)
1992			      - GET_MODE_SIZE (wanted_mode) - offset);
1993
1994		  pos %= GET_MODE_BITSIZE (wanted_mode);
1995
1996		  newmem = gen_rtx_MEM (wanted_mode,
1997					plus_constant (XEXP (tem, 0), offset));
1998		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1999		  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2000		  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2001
2002		  /* Make the change and see if the insn remains valid.  */
2003		  INSN_CODE (insn) = -1;
2004		  XEXP (x, 0) = newmem;
2005		  XEXP (x, 2) = GEN_INT (pos);
2006
2007		  if (recog_memoized (insn) >= 0)
2008		    return;
2009
2010		  /* Otherwise, restore old position.  XEXP (x, 0) will be
2011		     restored later.  */
2012		  XEXP (x, 2) = old_pos;
2013		}
2014	    }
2015
2016	  /* If we get here, the bitfield extract insn can't accept a memory
2017	     reference.  Copy the input into a register.  */
2018
2019	  tem1 = gen_reg_rtx (GET_MODE (tem));
2020	  emit_insn_before (gen_move_insn (tem1, tem), insn);
2021	  XEXP (x, 0) = tem1;
2022	  return;
2023	}
2024      break;
2025
2026    case SUBREG:
2027      if (SUBREG_REG (x) == var)
2028	{
2029	  /* If this is a special SUBREG made because VAR was promoted
2030	     to a wider mode, replace it with VAR and call ourselves
2031	     recursively, this time saying that the object previously
2032	     had its current mode (by virtue of the SUBREG).  */
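	  /* For instance (illustrative): a variable declared HImode but
	     promoted to an SImode pseudo was referenced as
	     (subreg:HI (reg:SI 65) 0); once reg 65 has been turned into an
	     HImode MEM, we put the MEM itself at *LOC and re-process it
	     with HImode as the previous mode.  */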
2033
2034	  if (SUBREG_PROMOTED_VAR_P (x))
2035	    {
2036	      *loc = var;
2037	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2038	      return;
2039	    }
2040
2041	  /* If this SUBREG makes VAR wider, it has become a paradoxical
2042	     SUBREG with VAR in memory, but these aren't allowed at this
2043	     stage of the compilation.  So load VAR into a pseudo and take
2044	     a SUBREG of that pseudo.  */
2045	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2046	    {
2047	      replacement = find_fixup_replacement (replacements, var);
2048	      if (replacement->new == 0)
2049		replacement->new = gen_reg_rtx (GET_MODE (var));
2050	      SUBREG_REG (x) = replacement->new;
2051	      return;
2052	    }
2053
2054	  /* See if we have already found a replacement for this SUBREG.
2055	     If so, use it.  Otherwise, make a MEM and see if the insn
2056	     is recognized.  If not, or if we should force MEM into a register,
2057	     make a pseudo for this SUBREG.  */
2058	  replacement = find_fixup_replacement (replacements, x);
2059	  if (replacement->new)
2060	    {
2061	      *loc = replacement->new;
2062	      return;
2063	    }
2064
2065	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2066
2067	  INSN_CODE (insn) = -1;
2068	  if (! flag_force_mem && recog_memoized (insn) >= 0)
2069	    return;
2070
2071	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2072	  return;
2073	}
2074      break;
2075
2076    case SET:
2077      /* First do special simplification of bit-field references.  */
2078      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2079	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2080	optimize_bit_field (x, insn, 0);
2081      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2082	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2083	optimize_bit_field (x, insn, NULL_PTR);
2084
2085      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2086	 into a register and then store it back out.  */
2087      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2088	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2089	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2090	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2091	      > GET_MODE_SIZE (GET_MODE (var))))
2092	{
2093	  replacement = find_fixup_replacement (replacements, var);
2094	  if (replacement->new == 0)
2095	    replacement->new = gen_reg_rtx (GET_MODE (var));
2096
2097	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2098	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
2099	}
2100
2101      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2102	 insn into a pseudo and store the low part of the pseudo into VAR.  */
2103      if (GET_CODE (SET_DEST (x)) == SUBREG
2104	  && SUBREG_REG (SET_DEST (x)) == var
2105	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2106	      > GET_MODE_SIZE (GET_MODE (var))))
2107	{
2108	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2109	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2110							    tem)),
2111			   insn);
2112	  break;
2113	}
2114
2115      {
2116	rtx dest = SET_DEST (x);
2117	rtx src = SET_SRC (x);
2118#ifdef HAVE_insv
2119	rtx outerdest = dest;
2120#endif
2121
2122	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2123	       || GET_CODE (dest) == SIGN_EXTRACT
2124	       || GET_CODE (dest) == ZERO_EXTRACT)
2125	  dest = XEXP (dest, 0);
2126
2127	if (GET_CODE (src) == SUBREG)
2128	  src = XEXP (src, 0);
2129
2130	/* If VAR does not appear at the top level of the SET
2131	   just scan the lower levels of the tree.  */
2132
2133        if (src != var && dest != var)
2134	  break;
2135
2136	/* We will need to rerecognize this insn.  */
2137	INSN_CODE (insn) = -1;
2138
2139#ifdef HAVE_insv
2140	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2141	  {
2142	    /* Since this case will return, ensure we fixup all the
2143	       operands here.  */
2144	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2145			      insn, replacements);
2146	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2147			      insn, replacements);
2148	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2149			      insn, replacements);
2150
2151	    tem = XEXP (outerdest, 0);
2152
2153	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2154	       that may appear inside a ZERO_EXTRACT.
2155	       This was legitimate when the MEM was a REG.  */
2156	    if (GET_CODE (tem) == SUBREG
2157		&& SUBREG_REG (tem) == var)
2158	      tem = fixup_memory_subreg (tem, insn, 0);
2159	    else
2160	      tem = fixup_stack_1 (tem, insn);
2161
2162	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2163		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2164		&& ! mode_dependent_address_p (XEXP (tem, 0))
2165		&& ! MEM_VOLATILE_P (tem))
2166	      {
2167		enum machine_mode wanted_mode
2168		  = insn_operand_mode[(int) CODE_FOR_insv][0];
2169		enum machine_mode is_mode = GET_MODE (tem);
2170		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2171
2172		/* If we have a narrower mode, we can do something.  */
2173		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2174		  {
2175		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2176		    rtx old_pos = XEXP (outerdest, 2);
2177		    rtx newmem;
2178
2179		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2180		      offset = (GET_MODE_SIZE (is_mode)
2181				- GET_MODE_SIZE (wanted_mode) - offset);
2182
2183		    pos %= GET_MODE_BITSIZE (wanted_mode);
2184
2185		    newmem = gen_rtx_MEM (wanted_mode,
2186					  plus_constant (XEXP (tem, 0), offset));
2187		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2188		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2189		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2190
2191		    /* Make the change and see if the insn remains valid.  */
2192		    INSN_CODE (insn) = -1;
2193		    XEXP (outerdest, 0) = newmem;
2194		    XEXP (outerdest, 2) = GEN_INT (pos);
2195
2196		    if (recog_memoized (insn) >= 0)
2197		      return;
2198
2199		    /* Otherwise, restore old position.  XEXP (outerdest, 0)
2200		       will be restored later.  */
2201		    XEXP (outerdest, 2) = old_pos;
2202		  }
2203	      }
2204
2205	    /* If we get here, the bit-field store doesn't accept a memory
2206	       operand or the field isn't at a constant position.  Load the value
2207	       into a register, do the store, and put it back into memory.  */
2208
2209	    tem1 = gen_reg_rtx (GET_MODE (tem));
2210	    emit_insn_before (gen_move_insn (tem1, tem), insn);
2211	    emit_insn_after (gen_move_insn (tem, tem1), insn);
2212	    XEXP (outerdest, 0) = tem1;
2213	    return;
2214	  }
2215#endif
2216
2217	/* STRICT_LOW_PART is a no-op on memory references
2218	   and it can cause combinations to be unrecognizable,
2219	   so eliminate it.  */
2220
2221	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2222	  SET_DEST (x) = XEXP (SET_DEST (x), 0);
2223
2224	/* A valid insn to copy VAR into or out of a register
2225	   must be left alone, to avoid an infinite loop here.
2226	   If the reference to VAR is by a subreg, fix that up,
2227	   since SUBREG is not valid for a memref.
2228	   Also fix up the address of the stack slot.
2229
2230	   Note that we must not try to recognize the insn until
2231	   after we know that we have valid addresses and no
2232	   (subreg (mem ...) ...) constructs, since these interfere
2233	   with determining the validity of the insn.  */
2234
2235	if ((SET_SRC (x) == var
2236	     || (GET_CODE (SET_SRC (x)) == SUBREG
2237		 && SUBREG_REG (SET_SRC (x)) == var))
2238	    && (GET_CODE (SET_DEST (x)) == REG
2239		|| (GET_CODE (SET_DEST (x)) == SUBREG
2240		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2241	    && GET_MODE (var) == promoted_mode
2242	    && x == single_set (insn))
2243	  {
2244	    rtx pat;
2245
2246	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
2247	    if (replacement->new)
2248	      SET_SRC (x) = replacement->new;
2249	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
2250	      SET_SRC (x) = replacement->new
2251		= fixup_memory_subreg (SET_SRC (x), insn, 0);
2252	    else
2253	      SET_SRC (x) = replacement->new
2254		= fixup_stack_1 (SET_SRC (x), insn);
2255
2256	    if (recog_memoized (insn) >= 0)
2257	      return;
2258
2259	    /* INSN is not valid, but we know that we want to
2260	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
2261	       we generate the move and see whether it requires more
2262	       than one insn.  If it does, we emit those insns and
2263	       delete INSN.  Otherwise, we can just replace the pattern
2264	       of INSN; we have already verified above that INSN has
2265	       no other function than to do X.  */
2266
2267	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2268	    if (GET_CODE (pat) == SEQUENCE)
2269	      {
2270		emit_insn_after (pat, insn);
2271		PUT_CODE (insn, NOTE);
2272		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2273		NOTE_SOURCE_FILE (insn) = 0;
2274	      }
2275	    else
2276	      PATTERN (insn) = pat;
2277
2278	    return;
2279	  }
2280
2281	if ((SET_DEST (x) == var
2282	     || (GET_CODE (SET_DEST (x)) == SUBREG
2283		 && SUBREG_REG (SET_DEST (x)) == var))
2284	    && (GET_CODE (SET_SRC (x)) == REG
2285		|| (GET_CODE (SET_SRC (x)) == SUBREG
2286		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2287	    && GET_MODE (var) == promoted_mode
2288	    && x == single_set (insn))
2289	  {
2290	    rtx pat;
2291
2292	    if (GET_CODE (SET_DEST (x)) == SUBREG)
2293	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2294	    else
2295	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2296
2297	    if (recog_memoized (insn) >= 0)
2298	      return;
2299
2300	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2301	    if (GET_CODE (pat) == SEQUENCE)
2302	      {
2303		emit_insn_after (pat, insn);
2304		PUT_CODE (insn, NOTE);
2305		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2306		NOTE_SOURCE_FILE (insn) = 0;
2307	      }
2308	    else
2309	      PATTERN (insn) = pat;
2310
2311	    return;
2312	  }
2313
2314	/* Otherwise, storing into VAR must be handled specially
2315	   by storing into a temporary and copying that into VAR
2316	   with a new insn after this one.  Note that this case
2317	   will be used when storing into a promoted scalar since
2318	   the insn will now have different modes on the input
2319	   and output and hence will be invalid (except for the case
2320	   of setting it to a constant, which does not need any
2321	   change if it is valid).  We generate extra code in that case,
2322	   but combine.c will eliminate it.  */
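	/* Illustrative example, assuming a little-endian target: with VAR
	   promoted from HImode to SImode, (set VAR (plus:SI ...)) now mixes
	   an HImode MEM destination with an SImode source, so it becomes
	     (set (reg:SI NEW) (plus:SI ...))
	     (set VAR (subreg:HI (reg:SI NEW) 0))
	   where NEW is a fresh pseudo.  */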
2323
2324	if (dest == var)
2325	  {
2326	    rtx temp;
2327	    rtx fixeddest = SET_DEST (x);
2328
2329	    /* A STRICT_LOW_PART around a MEM can be discarded.  */
2330	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2331	      fixeddest = XEXP (fixeddest, 0);
2332	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
2333	    if (GET_CODE (fixeddest) == SUBREG)
2334	      {
2335		fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2336		promoted_mode = GET_MODE (fixeddest);
2337	      }
2338	    else
2339	      fixeddest = fixup_stack_1 (fixeddest, insn);
2340
2341	    temp = gen_reg_rtx (promoted_mode);
2342
2343	    emit_insn_after (gen_move_insn (fixeddest,
2344					    gen_lowpart (GET_MODE (fixeddest),
2345							 temp)),
2346			     insn);
2347
2348	    SET_DEST (x) = temp;
2349	  }
2350      }
2351
2352    default:
2353      break;
2354    }
2355
2356  /* Nothing special about this RTX; fix its operands.  */
2357
2358  fmt = GET_RTX_FORMAT (code);
2359  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2360    {
2361      if (fmt[i] == 'e')
2362	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2363      if (fmt[i] == 'E')
2364	{
2365	  register int j;
2366	  for (j = 0; j < XVECLEN (x, i); j++)
2367	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2368			      insn, replacements);
2369	}
2370    }
2371}
2372
2373/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2374   return an rtx (MEM:m1 newaddr) which is equivalent.
2375   If any insns must be emitted to compute NEWADDR, put them before INSN.
2376
2377   UNCRITICAL nonzero means accept paradoxical subregs.
2378   This is used for subregs found inside REG_NOTES.  */
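
/* Example (illustrative): on a little-endian machine with 4-byte words,
   (subreg:SI (mem:DI ADDR) 1) becomes (mem:SI (plus ADDR (const_int 4))),
   with any insns needed to compute the new address emitted before INSN.  */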
2379
2380static rtx
2381fixup_memory_subreg (x, insn, uncritical)
2382     rtx x;
2383     rtx insn;
2384     int uncritical;
2385{
2386  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2387  rtx addr = XEXP (SUBREG_REG (x), 0);
2388  enum machine_mode mode = GET_MODE (x);
2389  rtx result;
2390
2391  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
2392  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2393      && ! uncritical)
2394    abort ();
2395
2396  if (BYTES_BIG_ENDIAN)
2397    offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2398	       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2399  addr = plus_constant (addr, offset);
2400  if (!flag_force_addr && memory_address_p (mode, addr))
2401    /* Shortcut if no insns need be emitted.  */
2402    return change_address (SUBREG_REG (x), mode, addr);
2403  start_sequence ();
2404  result = change_address (SUBREG_REG (x), mode, addr);
2405  emit_insn_before (gen_sequence (), insn);
2406  end_sequence ();
2407  return result;
2408}
2409
2410/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2411   Replace subexpressions of X in place.
2412   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2413   Otherwise return X, with its contents possibly altered.
2414
2415   If any insns must be emitted to compute NEWADDR, put them before INSN.
2416
2417   UNCRITICAL is as in fixup_memory_subreg.  */
2418
2419static rtx
2420walk_fixup_memory_subreg (x, insn, uncritical)
2421     register rtx x;
2422     rtx insn;
2423     int uncritical;
2424{
2425  register enum rtx_code code;
2426  register char *fmt;
2427  register int i;
2428
2429  if (x == 0)
2430    return 0;
2431
2432  code = GET_CODE (x);
2433
2434  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2435    return fixup_memory_subreg (x, insn, uncritical);
2436
2437  /* Nothing special about this RTX; fix its operands.  */
2438
2439  fmt = GET_RTX_FORMAT (code);
2440  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2441    {
2442      if (fmt[i] == 'e')
2443	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2444      if (fmt[i] == 'E')
2445	{
2446	  register int j;
2447	  for (j = 0; j < XVECLEN (x, i); j++)
2448	    XVECEXP (x, i, j)
2449	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2450	}
2451    }
2452  return x;
2453}
2454
2455/* For each memory ref within X, if it refers to a stack slot
2456   with an out of range displacement, put the address in a temp register
2457   (emitting new insns before INSN to load these registers)
2458   and alter the memory ref to use that register.
2459   Replace each such MEM rtx with a copy, to avoid clobberage.  */
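
/* Example (illustrative): if the target cannot encode
   (mem:SI (plus (reg fp) (const_int 40000))) directly, we emit insns
   before INSN that compute fp + 40000 into a temporary register and
   replace the MEM with (mem:SI (reg TEMP)).  */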
2460
2461static rtx
2462fixup_stack_1 (x, insn)
2463     rtx x;
2464     rtx insn;
2465{
2466  register int i;
2467  register RTX_CODE code = GET_CODE (x);
2468  register char *fmt;
2469
2470  if (code == MEM)
2471    {
2472      register rtx ad = XEXP (x, 0);
2473      /* If we have the address of a stack slot but it is not valid
2474	 (the displacement is too large), compute the sum in a register.  */
2475      if (GET_CODE (ad) == PLUS
2476	  && GET_CODE (XEXP (ad, 0)) == REG
2477	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2478	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2479	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2480#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2481	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2482#endif
2483	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2484	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2485	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
2486	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2487	{
2488	  rtx temp, seq;
2489	  if (memory_address_p (GET_MODE (x), ad))
2490	    return x;
2491
2492	  start_sequence ();
2493	  temp = copy_to_reg (ad);
2494	  seq = gen_sequence ();
2495	  end_sequence ();
2496	  emit_insn_before (seq, insn);
2497	  return change_address (x, VOIDmode, temp);
2498	}
2499      return x;
2500    }
2501
2502  fmt = GET_RTX_FORMAT (code);
2503  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2504    {
2505      if (fmt[i] == 'e')
2506	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2507      if (fmt[i] == 'E')
2508	{
2509	  register int j;
2510	  for (j = 0; j < XVECLEN (x, i); j++)
2511	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2512	}
2513    }
2514  return x;
2515}
2516
2517/* Optimization: a bit-field instruction whose field
2518   happens to be a byte or halfword in memory
2519   can be changed to a move instruction.
2520
2521   We call here when INSN is an insn to examine or store into a bit-field.
2522   BODY is the SET-rtx to be altered.
2523
2524   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2525   (Currently this is called only from function.c, and EQUIV_MEM
2526   is always 0.)  */
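
/* Illustrative example, assuming a little-endian target:
     (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
	  (reg:SI 70))
   stores an aligned byte and can become the simple move
     (set (mem:QI (plus ADDR (const_int 1))) (subreg:QI (reg:SI 70) 0)).  */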
2527
2528static void
2529optimize_bit_field (body, insn, equiv_mem)
2530     rtx body;
2531     rtx insn;
2532     rtx *equiv_mem;
2533{
2534  register rtx bitfield;
2535  int destflag;
2536  rtx seq = 0;
2537  enum machine_mode mode;
2538
2539  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2540      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2541    bitfield = SET_DEST (body), destflag = 1;
2542  else
2543    bitfield = SET_SRC (body), destflag = 0;
2544
2545  /* First check that the field being stored has constant size and position
2546     and is in fact a byte or halfword suitably aligned.  */
2547
2548  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2549      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2550      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2551	  != BLKmode)
2552      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2553    {
2554      register rtx memref = 0;
2555
2556      /* Now check that the containing word is memory, not a register,
2557	 and that it is safe to change the machine mode.  */
2558
2559      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2560	memref = XEXP (bitfield, 0);
2561      else if (GET_CODE (XEXP (bitfield, 0)) == REG
2562	       && equiv_mem != 0)
2563	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2564      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2565	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2566	memref = SUBREG_REG (XEXP (bitfield, 0));
2567      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2568	       && equiv_mem != 0
2569	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2570	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2571
2572      if (memref
2573	  && ! mode_dependent_address_p (XEXP (memref, 0))
2574	  && ! MEM_VOLATILE_P (memref))
2575	{
2576	  /* Now adjust the address, first for any subreg'ing
2577	     that we are now getting rid of,
2578	     and then for which byte of the word is wanted.  */
2579
2580	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2581	  rtx insns;
2582
2583	  /* Adjust OFFSET to count bits from low-address byte.  */
2584	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2585	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2586		      - offset - INTVAL (XEXP (bitfield, 1)));
2587
2588	  /* Adjust OFFSET to count bytes from low-address byte.  */
2589	  offset /= BITS_PER_UNIT;
2590	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2591	    {
2592	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2593	      if (BYTES_BIG_ENDIAN)
2594		offset -= (MIN (UNITS_PER_WORD,
2595				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2596			   - MIN (UNITS_PER_WORD,
2597				  GET_MODE_SIZE (GET_MODE (memref))));
2598	    }
2599
2600	  start_sequence ();
2601	  memref = change_address (memref, mode,
2602				   plus_constant (XEXP (memref, 0), offset));
2603	  insns = get_insns ();
2604	  end_sequence ();
2605	  emit_insns_before (insns, insn);
2606
2607	  /* Store this memory reference where
2608	     we found the bit field reference.  */
2609
2610	  if (destflag)
2611	    {
2612	      validate_change (insn, &SET_DEST (body), memref, 1);
2613	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2614		{
2615		  rtx src = SET_SRC (body);
2616		  while (GET_CODE (src) == SUBREG
2617			 && SUBREG_WORD (src) == 0)
2618		    src = SUBREG_REG (src);
2619		  if (GET_MODE (src) != GET_MODE (memref))
2620		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2621		  validate_change (insn, &SET_SRC (body), src, 1);
2622		}
2623	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
2624		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2625		/* This shouldn't happen because anything that didn't have
2626		   one of these modes should have been converted explicitly
2627		   and then referenced through a subreg.
2628		   This is so because the original bit-field was
2629		   handled by agg_mode and so its tree structure had
2630		   the same mode that memref now has.  */
2631		abort ();
2632	    }
2633	  else
2634	    {
2635	      rtx dest = SET_DEST (body);
2636
2637	      while (GET_CODE (dest) == SUBREG
2638		     && SUBREG_WORD (dest) == 0
2639		     && (GET_MODE_CLASS (GET_MODE (dest))
2640			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2641		dest = SUBREG_REG (dest);
2642
2643	      validate_change (insn, &SET_DEST (body), dest, 1);
2644
2645	      if (GET_MODE (dest) == GET_MODE (memref))
2646		validate_change (insn, &SET_SRC (body), memref, 1);
2647	      else
2648		{
2649		  /* Convert the mem ref to the destination mode.  */
2650		  rtx newreg = gen_reg_rtx (GET_MODE (dest));
2651
2652		  start_sequence ();
2653		  convert_move (newreg, memref,
2654				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2655		  seq = get_insns ();
2656		  end_sequence ();
2657
2658		  validate_change (insn, &SET_SRC (body), newreg, 1);
2659		}
2660	    }
2661
2662	  /* See if we can convert this extraction or insertion into
2663	     a simple move insn.  We might not be able to do so if this
2664	     was, for example, part of a PARALLEL.
2665
2666	     If we succeed, write out any needed conversions.  If we fail,
2667	     it is hard to guess why we failed, so don't do anything
2668	     special; just let the optimization be suppressed.  */
2669
2670	  if (apply_change_group () && seq)
2671	    emit_insns_before (seq, insn);
2672	}
2673    }
2674}
2675
2676/* These routines are responsible for converting virtual register references
2677   to the actual hard register references once RTL generation is complete.
2678
2679   The following four variables are used for communication between the
2680   routines.  They contain the offsets of the virtual registers from their
2681   respective hard registers.  */
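
/* For example (illustrative): on a target where STARTING_FRAME_OFFSET is
   -16, var_offset is -16 and a reference such as
   (plus virtual_stack_vars_rtx (const_int 8)) is instantiated as
   (plus frame_pointer_rtx (const_int -8)).  */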
2682
2683static int in_arg_offset;
2684static int var_offset;
2685static int dynamic_offset;
2686static int out_arg_offset;
2687
2688/* In most machines, the stack pointer register is equivalent to the bottom
2689   of the stack.  */
2690
2691#ifndef STACK_POINTER_OFFSET
2692#define STACK_POINTER_OFFSET	0
2693#endif
2694
2695/* If not defined, pick an appropriate default for the offset of dynamically
2696   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2697   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
2698
2699#ifndef STACK_DYNAMIC_OFFSET
2700
2701#ifdef ACCUMULATE_OUTGOING_ARGS
2702/* The bottom of the stack points to the actual arguments.  If
2703   REG_PARM_STACK_SPACE is defined, this includes the space for the register
2704   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2705   stack space for register parameters is not pushed by the caller, but
2706   rather is part of the fixed stack areas and hence not included in
2707   `current_function_outgoing_args_size'.  Nevertheless, we must allow
2708   for it when allocating stack dynamic objects.  */
2709
2710#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2711#define STACK_DYNAMIC_OFFSET(FNDECL)	\
2712(current_function_outgoing_args_size	\
2713 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2714
2715#else
2716#define STACK_DYNAMIC_OFFSET(FNDECL)	\
2717(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2718#endif
2719
2720#else
2721#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2722#endif
2723#endif
2724
2725/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2726   its address taken.  DECL is the decl for the object stored in the
2727   register, for later use if we do need to force REG into the stack.
2728   REG is overwritten by the MEM like in put_reg_into_stack.  */
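
/* The result has the shape (illustrative)
     (mem:DECL-MODE (addressof:Pmode (reg NEW-PSEUDO) ORIGINAL-REGNO))
   and since REG itself is rewritten in place, every existing reference
   to REG now sees the ADDRESSOF form.  */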
2729
2730rtx
2731gen_mem_addressof (reg, decl)
2732     rtx reg;
2733     tree decl;
2734{
2735  tree type = TREE_TYPE (decl);
2736
2737  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2738  SET_ADDRESSOF_DECL (r, decl);
2739
2740  XEXP (reg, 0) = r;
2741  PUT_CODE (reg, MEM);
2742  PUT_MODE (reg, DECL_MODE (decl));
2743  MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2744  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2745  MEM_ALIAS_SET (reg) = get_alias_set (decl);
2746
2747  if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2748    fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2749
2750  return reg;
2751}
2752
2753/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */
2754
2755void
2756flush_addressof (decl)
2757     tree decl;
2758{
2759  if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2760      && DECL_RTL (decl) != 0
2761      && GET_CODE (DECL_RTL (decl)) == MEM
2762      && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2763      && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2764    put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2765}
2766
2767/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */
2768
2769static void
2770put_addressof_into_stack (r)
2771     rtx r;
2772{
2773  tree decl = ADDRESSOF_DECL (r);
2774  rtx reg = XEXP (r, 0);
2775
2776  if (GET_CODE (reg) != REG)
2777    abort ();
2778
2779  put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2780		      DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2781		      ADDRESSOF_REGNO (r),
2782		      TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2783}
2784
2785/* Helper function for purge_addressof.  See if the rtx expression at *LOC
2786   in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
2787   the stack.  */
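
/* For instance (illustrative): (mem:SI (addressof (reg:SI 65) 65))
   collapses to (reg:SI 65) when the modes agree; if they differ we try a
   SUBREG of the register, and only when no replacement is valid (or the
   MEM is volatile or BLKmode) is reg 65 forced into the stack.  */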
2788
2789static void
2790purge_addressof_1 (loc, insn, force)
2791     rtx *loc;
2792     rtx insn;
2793     int force;
2794{
2795  rtx x;
2796  RTX_CODE code;
2797  int i, j;
2798  char *fmt;
2799
2800  /* Re-start here to avoid recursion in common cases.  */
2801 restart:
2802
2803  x = *loc;
2804  if (x == 0)
2805    return;
2806
2807  code = GET_CODE (x);
2808
2809  if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2810    {
2811      rtx insns;
2812      /* We must create a copy of the rtx because it was created by
2813	 overwriting a REG rtx which is always shared.  */
2814      rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2815
2816      if (validate_change (insn, loc, sub, 0))
2817	return;
2818
2819      start_sequence ();
2820      if (! validate_change (insn, loc,
2821			     force_operand (sub, NULL_RTX),
2822			     0))
2823	abort ();
2824
2825      insns = get_insns ();
2826      end_sequence ();
2827      emit_insns_before (insns, insn);
2828      return;
2829    }
2830  else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2831    {
2832      rtx sub = XEXP (XEXP (x, 0), 0);
2833
2834      if (GET_CODE (sub) == MEM)
2835	sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2836
2837      if (GET_CODE (sub) == REG
2838	  && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2839	{
2840	  put_addressof_into_stack (XEXP (x, 0));
2841	  return;
2842	}
2843      else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2844	{
2845	  if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2846	    {
2847	      rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2848	      if (validate_change (insn, loc, sub2, 0))
2849		goto restart;
2850	    }
2851	}
2852      else if (validate_change (insn, loc, sub, 0))
2853	goto restart;
2854      /* Else give up and put it into the stack.  */
2855    }
2856  else if (code == ADDRESSOF)
2857    {
2858      put_addressof_into_stack (x);
2859      return;
2860    }
2861
2862  /* Scan all subexpressions.  */
2863  fmt = GET_RTX_FORMAT (code);
2864  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2865    {
2866      if (*fmt == 'e')
2867	purge_addressof_1 (&XEXP (x, i), insn, force);
2868      else if (*fmt == 'E')
2869	for (j = 0; j < XVECLEN (x, i); j++)
2870	  purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
2871    }
2872}
2873
2874/* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
2875   (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2876   stack.  */
2877
2878void
2879purge_addressof (insns)
2880     rtx insns;
2881{
2882  rtx insn;
2883  for (insn = insns; insn; insn = NEXT_INSN (insn))
2884    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2885	|| GET_CODE (insn) == CALL_INSN)
2886      {
2887	purge_addressof_1 (&PATTERN (insn), insn,
2888			   asm_noperands (PATTERN (insn)) > 0);
2889	purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2890      }
2891}
2892
2893/* Pass through the INSNS of function FNDECL and convert virtual register
2894   references to hard register references.  */
2895
2896void
2897instantiate_virtual_regs (fndecl, insns)
2898     tree fndecl;
2899     rtx insns;
2900{
2901  rtx insn;
2902  int i;
2903
2904  /* Compute the offsets to use for this function.  */
2905  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2906  var_offset = STARTING_FRAME_OFFSET;
2907  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2908  out_arg_offset = STACK_POINTER_OFFSET;
2909
2910  /* Scan all variables and parameters of this function.  For each that is
2911     in memory, instantiate all virtual registers if the result is a valid
2912     address.  If not, we do it later.  That will handle most uses of virtual
2913     regs on many machines.  */
2914  instantiate_decls (fndecl, 1);
2915
2916  /* Initialize recognition, indicating that volatile is OK.  */
2917  init_recog ();
2918
2919  /* Scan through all the insns, instantiating every virtual register still
2920     present.  */
2921  for (insn = insns; insn; insn = NEXT_INSN (insn))
2922    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2923	|| GET_CODE (insn) == CALL_INSN)
2924      {
2925	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2926	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2927      }
2928
2929  /* Instantiate the stack slots for the parm registers, for later use in
2930     addressof elimination.  */
2931  for (i = 0; i < max_parm_reg; ++i)
2932    if (parm_reg_stack_loc[i])
2933      instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2934
2935  /* Now instantiate the remaining register equivalences for debugging info.
2936     These will not be valid addresses.  */
2937  instantiate_decls (fndecl, 0);
2938
2939  /* Indicate that, from now on, assign_stack_local should use
2940     frame_pointer_rtx.  */
2941  virtuals_instantiated = 1;
2942}
2943
2944/* Scan all decls in FNDECL (both variables and parameters) and instantiate
2945   all virtual registers in their DECL_RTL's.
2946
2947   If VALID_ONLY, do this only if the resulting address is still valid.
2948   Otherwise, always do it.  */
2949
2950static void
2951instantiate_decls (fndecl, valid_only)
2952     tree fndecl;
2953     int valid_only;
2954{
2955  tree decl;
2956
2957  if (DECL_SAVED_INSNS (fndecl))
2958    /* When compiling an inline function, the obstack used for
2959       rtl allocation is the maybepermanent_obstack.  Calling
2960       `resume_temporary_allocation' switches us back to that
2961       obstack while we process this function's parameters.  */
2962    resume_temporary_allocation ();
2963
2964  /* Process all parameters of the function.  */
2965  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2966    {
2967      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
2968
2969      instantiate_decl (DECL_RTL (decl), size, valid_only);
2970
2971      /* If the parameter was promoted, then the incoming RTL mode may be
2972	 larger than the declared type size.  We must use the larger of
2973	 the two sizes.  */
2974      size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2975      instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2976    }
2977
2978  /* Now process all variables defined in the function or its subblocks.  */
2979  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2980
2981  if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2982    {
2983      /* Save all rtl allocated for this function by raising the
2984	 high-water mark on the maybepermanent_obstack.  */
2985      preserve_data ();
2986      /* All further rtl allocation is now done in the current_obstack.  */
2987      rtl_in_current_obstack ();
2988    }
2989}
2990
2991/* Subroutine of instantiate_decls: Process all decls in the given
2992   BLOCK node and all its subblocks.  */
2993
2994static void
2995instantiate_decls_1 (let, valid_only)
2996     tree let;
2997     int valid_only;
2998{
2999  tree t;
3000
3001  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3002    instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3003		      valid_only);
3004
3005  /* Process all subblocks.  */
3006  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3007    instantiate_decls_1 (t, valid_only);
3008}
3009
3010/* Subroutine of the preceding procedures: Given RTL representing a
3011   decl and the size of the object, do any instantiation required.
3012
3013   If VALID_ONLY is non-zero, it means that the RTL should only be
3014   changed if the new address is valid.  */
3015
3016static void
3017instantiate_decl (x, size, valid_only)
3018     rtx x;
3019     int size;
3020     int valid_only;
3021{
3022  enum machine_mode mode;
3023  rtx addr;
3024
3025  /* If this is not a MEM, no need to do anything.  Similarly if the
3026     address is a constant or a register that is not a virtual register.  */
3027
3028  if (x == 0 || GET_CODE (x) != MEM)
3029    return;
3030
3031  addr = XEXP (x, 0);
3032  if (CONSTANT_P (addr)
3033      || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3034      || (GET_CODE (addr) == REG
3035	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3036	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3037    return;
3038
3039  /* If we should only do this if the address is valid, copy the address.
3040     We need to do this so we can undo any changes that might make the
3041     address invalid.  This copy is unfortunate, but probably can't be
3042     avoided.  */
3043
3044  if (valid_only)
3045    addr = copy_rtx (addr);
3046
3047  instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3048
3049  if (valid_only)
3050    {
3051      /* Now verify that the resulting address is valid for every integer or
3052	 floating-point mode up to and including SIZE bytes long.  We do this
3053	 since the object might be accessed in any mode and frame addresses
3054	 are shared.  */
3055
3056      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3057	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3058	   mode = GET_MODE_WIDER_MODE (mode))
3059	if (! memory_address_p (mode, addr))
3060	  return;
3061
3062      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3063	   mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3064	   mode = GET_MODE_WIDER_MODE (mode))
3065	if (! memory_address_p (mode, addr))
3066	  return;
3067    }
3068
3069  /* Put back the address now that we have updated it and we either know
3070     it is valid or we don't care whether it is valid.  */
3071
3072  XEXP (x, 0) = addr;
3073}
3074
3075/* Given a pointer to a piece of rtx and an optional pointer to the
3076   containing object, instantiate any virtual registers present in it.
3077
3078   If EXTRA_INSNS, we always do the replacement and generate
3079   any extra insns before OBJECT.  If it is zero, we do nothing if the
3080   replacement is not valid.
3081
3082   Return 1 if we either had nothing to do or if we were able to do the
3083   needed replacement.  Return 0 otherwise; we only return zero if
3084   EXTRA_INSNS is zero.
3085
3086   We first try some simple transformations to avoid the creation of extra
3087   pseudos.  */
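
/* Example (illustrative): with in_arg_offset == 8, the address
   (plus:SI virtual_incoming_args_rtx (const_int 4)) is rewritten as
   (plus:SI (reg arg-pointer) (const_int 12)) when that form makes a valid
   address, with no extra insns required.  */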
3088
3089static int
3090instantiate_virtual_regs_1 (loc, object, extra_insns)
3091     rtx *loc;
3092     rtx object;
3093     int extra_insns;
3094{
3095  rtx x;
3096  RTX_CODE code;
3097  rtx new = 0;
3098  HOST_WIDE_INT offset;
3099  rtx temp;
3100  rtx seq;
3101  int i, j;
3102  char *fmt;
3103
3104  /* Re-start here to avoid recursion in common cases.  */
3105 restart:
3106
3107  x = *loc;
3108  if (x == 0)
3109    return 1;
3110
3111  code = GET_CODE (x);
3112
3113  /* Check for some special cases.  */
3114  switch (code)
3115    {
3116    case CONST_INT:
3117    case CONST_DOUBLE:
3118    case CONST:
3119    case SYMBOL_REF:
3120    case CODE_LABEL:
3121    case PC:
3122    case CC0:
3123    case ASM_INPUT:
3124    case ADDR_VEC:
3125    case ADDR_DIFF_VEC:
3126    case RETURN:
3127      return 1;
3128
3129    case SET:
3130      /* We are allowed to set the virtual registers.  This means that
3131	 the actual register should receive the source minus the
3132	 appropriate offset.  This is used, for example, in the handling
3133	 of non-local gotos.  */
3134      if (SET_DEST (x) == virtual_incoming_args_rtx)
3135	new = arg_pointer_rtx, offset = - in_arg_offset;
3136      else if (SET_DEST (x) == virtual_stack_vars_rtx)
3137	new = frame_pointer_rtx, offset = - var_offset;
3138      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3139	new = stack_pointer_rtx, offset = - dynamic_offset;
3140      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3141	new = stack_pointer_rtx, offset = - out_arg_offset;
3142
3143      if (new)
3144	{
3145	  /* The only valid sources here are PLUS or REG.  Just do
3146	     the simplest possible thing to handle them.  */
3147	  if (GET_CODE (SET_SRC (x)) != REG
3148	      && GET_CODE (SET_SRC (x)) != PLUS)
3149	    abort ();
3150
3151	  start_sequence ();
3152	  if (GET_CODE (SET_SRC (x)) != REG)
3153	    temp = force_operand (SET_SRC (x), NULL_RTX);
3154	  else
3155	    temp = SET_SRC (x);
3156	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3157	  seq = get_insns ();
3158	  end_sequence ();
3159
3160	  emit_insns_before (seq, object);
3161	  SET_DEST (x) = new;
3162
3163	  if (! validate_change (object, &SET_SRC (x), temp, 0)
3164	      || ! extra_insns)
3165	    abort ();
3166
3167	  return 1;
3168	}
3169
3170      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3171      loc = &SET_SRC (x);
3172      goto restart;
3173
3174    case PLUS:
3175      /* Handle special case of virtual register plus constant.  */
3176      if (CONSTANT_P (XEXP (x, 1)))
3177	{
3178	  rtx old, new_offset;
3179
3180	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
3181	  if (GET_CODE (XEXP (x, 0)) == PLUS)
3182	    {
3183	      rtx inner = XEXP (XEXP (x, 0), 0);
3184
3185	      if (inner == virtual_incoming_args_rtx)
3186		new = arg_pointer_rtx, offset = in_arg_offset;
3187	      else if (inner == virtual_stack_vars_rtx)
3188		new = frame_pointer_rtx, offset = var_offset;
3189	      else if (inner == virtual_stack_dynamic_rtx)
3190		new = stack_pointer_rtx, offset = dynamic_offset;
3191	      else if (inner == virtual_outgoing_args_rtx)
3192		new = stack_pointer_rtx, offset = out_arg_offset;
3193	      else
3194		{
3195		  loc = &XEXP (x, 0);
3196		  goto restart;
3197		}
3198
3199	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3200					  extra_insns);
3201	      new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3202	    }
3203
3204	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3205	    new = arg_pointer_rtx, offset = in_arg_offset;
3206	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3207	    new = frame_pointer_rtx, offset = var_offset;
3208	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3209	    new = stack_pointer_rtx, offset = dynamic_offset;
3210	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3211	    new = stack_pointer_rtx, offset = out_arg_offset;
3212	  else
3213	    {
3214	      /* We know the second operand is a constant.  Unless the
3215		 first operand is a REG (which has already been checked),
3216		 it needs to be checked.  */
3217	      if (GET_CODE (XEXP (x, 0)) != REG)
3218		{
3219		  loc = &XEXP (x, 0);
3220		  goto restart;
3221		}
3222	      return 1;
3223	    }
3224
3225	  new_offset = plus_constant (XEXP (x, 1), offset);
3226
3227	  /* If the new constant is zero, try to replace the sum with just
3228	     the register.  */
3229	  if (new_offset == const0_rtx
3230	      && validate_change (object, loc, new, 0))
3231	    return 1;
3232
3233	  /* Next try to replace the register and new offset.
3234	     There are two changes to validate here and we can't assume that
3235	     when the old offset equals the new one, just changing the register
3236	     will yield a valid insn.  In the interests of a little efficiency,
3237	     however, we only call validate_change once (we don't queue up the
3238	     changes and then call apply_change_group).  */
3239
3240	  old = XEXP (x, 0);
3241	  if (offset == 0
3242	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
3243	      : (XEXP (x, 0) = new,
3244		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3245	    {
3246	      if (! extra_insns)
3247		{
3248		  XEXP (x, 0) = old;
3249		  return 0;
3250		}
3251
3252	      /* Otherwise copy the new constant into a register and replace
3253		 the constant with that register.  */
3254	      temp = gen_reg_rtx (Pmode);
3255	      XEXP (x, 0) = new;
3256	      if (validate_change (object, &XEXP (x, 1), temp, 0))
3257		emit_insn_before (gen_move_insn (temp, new_offset), object);
3258	      else
3259		{
3260		  /* If that didn't work, replace this expression with a
3261		     register containing the sum.  */
3262
3263		  XEXP (x, 0) = old;
3264		  new = gen_rtx_PLUS (Pmode, new, new_offset);
3265
3266		  start_sequence ();
3267		  temp = force_operand (new, NULL_RTX);
3268		  seq = get_insns ();
3269		  end_sequence ();
3270
3271		  emit_insns_before (seq, object);
3272		  if (! validate_change (object, loc, temp, 0)
3273		      && ! validate_replace_rtx (x, temp, object))
3274		    abort ();
3275		}
3276	    }
3277
3278	  return 1;
3279	}
3280
3281      /* Fall through to generic two-operand expression case.  */
3282    case EXPR_LIST:
3283    case CALL:
3284    case COMPARE:
3285    case MINUS:
3286    case MULT:
3287    case DIV:      case UDIV:
3288    case MOD:      case UMOD:
3289    case AND:      case IOR:      case XOR:
3290    case ROTATERT: case ROTATE:
3291    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3292    case NE:       case EQ:
3293    case GE:       case GT:       case GEU:    case GTU:
3294    case LE:       case LT:       case LEU:    case LTU:
3295      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3296	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3297      loc = &XEXP (x, 0);
3298      goto restart;
3299
3300    case MEM:
3301      /* Most cases of MEM that convert to valid addresses have already been
3302	 handled by our scan of decls.  The only special handling we
3303	 need here is to make a copy of the rtx to ensure it isn't being
3304	 shared if we have to change it to a pseudo.
3305
3306	 If the rtx is a simple reference to an address via a virtual register,
3307	 it can potentially be shared.  In such cases, first try to make it
3308	 a valid address, which can also be shared.  Otherwise, copy it and
3309	 proceed normally.
3310
3311	 First check for common cases that need no processing.  These are
3312	 usually due to instantiation already being done on a previous instance
3313	 of a shared rtx.  */
3314
3315      temp = XEXP (x, 0);
3316      if (CONSTANT_ADDRESS_P (temp)
3317#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3318	  || temp == arg_pointer_rtx
3319#endif
3320#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3321	  || temp == hard_frame_pointer_rtx
3322#endif
3323	  || temp == frame_pointer_rtx)
3324	return 1;
3325
3326      if (GET_CODE (temp) == PLUS
3327	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3328	  && (XEXP (temp, 0) == frame_pointer_rtx
3329#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3330	      || XEXP (temp, 0) == hard_frame_pointer_rtx
3331#endif
3332#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3333	      || XEXP (temp, 0) == arg_pointer_rtx
3334#endif
3335	      ))
3336	return 1;
3337
3338      if (temp == virtual_stack_vars_rtx
3339	  || temp == virtual_incoming_args_rtx
3340	  || (GET_CODE (temp) == PLUS
3341	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3342	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
3343		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3344	{
3345	  /* This MEM may be shared.  If the substitution can be done without
3346	     the need to generate new pseudos, we want to do it in place
3347	     so all copies of the shared rtx benefit.  The call below will
3348	     only make substitutions if the resulting address is still
3349	     valid.
3350
3351	     Note that we cannot pass X as the object in the recursive call
3352	     since the insn being processed may not allow all valid
3353	     addresses.  However, if we were not passed an object, we can
3354	     only modify X without copying it if X will have a valid
3355	     address.
3356
3357	     ??? Also note that this can still lose if OBJECT is an insn that
3358	     has fewer restrictions on an address than some other insn.
3359	     In that case, we will modify the shared address.  This case
3360	     doesn't seem very likely, though.  One case where this could
3361	     happen is in the case of a USE or CLOBBER reference, but we
3362	     take care of that below.  */
3363
3364	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3365					  object ? object : x, 0))
3366	    return 1;
3367
3368	  /* Otherwise make a copy and process that copy.  We copy the entire
3369	     RTL expression since it might be a PLUS which could also be
3370	     shared.  */
3371	  *loc = x = copy_rtx (x);
3372	}
3373
3374      /* Fall through to generic unary operation case.  */
3375    case SUBREG:
3376    case STRICT_LOW_PART:
3377    case NEG:          case NOT:
3378    case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
3379    case SIGN_EXTEND:  case ZERO_EXTEND:
3380    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3381    case FLOAT:        case FIX:
3382    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3383    case ABS:
3384    case SQRT:
3385    case FFS:
3386      /* These cases either have just one operand or we know that we need not
3387	 check the rest of the operands.  */
3388      loc = &XEXP (x, 0);
3389      goto restart;
3390
3391    case USE:
3392    case CLOBBER:
3393      /* If the operand is a MEM, see if the change is a valid MEM.  If not,
3394	 go ahead and make the invalid one, but do it to a copy.  For a REG,
3395	 just make the recursive call, since there's no chance of a problem. */
3396
3397      if ((GET_CODE (XEXP (x, 0)) == MEM
3398	   && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3399					  0))
3400	  || (GET_CODE (XEXP (x, 0)) == REG
3401	      && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3402	return 1;
3403
3404      XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3405      loc = &XEXP (x, 0);
3406      goto restart;
3407
3408    case REG:
3409      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
3410	 in front of this insn and substitute the temporary.  */
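      /* E.g. a bare (reg virtual-incoming-args) becomes
	 (plus (reg arg-pointer) (const_int in_arg_offset)); if that sum is
	 not a valid operand for this insn and extra insns are allowed, it
	 is computed into a temporary just before the insn instead.  */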
3411      if (x == virtual_incoming_args_rtx)
3412	new = arg_pointer_rtx, offset = in_arg_offset;
3413      else if (x == virtual_stack_vars_rtx)
3414	new = frame_pointer_rtx, offset = var_offset;
3415      else if (x == virtual_stack_dynamic_rtx)
3416	new = stack_pointer_rtx, offset = dynamic_offset;
3417      else if (x == virtual_outgoing_args_rtx)
3418	new = stack_pointer_rtx, offset = out_arg_offset;
3419
3420      if (new)
3421	{
3422	  temp = plus_constant (new, offset);
3423	  if (!validate_change (object, loc, temp, 0))
3424	    {
3425	      if (! extra_insns)
3426		return 0;
3427
3428	      start_sequence ();
3429	      temp = force_operand (temp, NULL_RTX);
3430	      seq = get_insns ();
3431	      end_sequence ();
3432
3433	      emit_insns_before (seq, object);
3434	      if (! validate_change (object, loc, temp, 0)
3435		  && ! validate_replace_rtx (x, temp, object))
3436		abort ();
3437	    }
3438	}
3439
3440      return 1;
3441
3442    case ADDRESSOF:
3443      if (GET_CODE (XEXP (x, 0)) == REG)
3444	return 1;
3445
3446      else if (GET_CODE (XEXP (x, 0)) == MEM)
3447	{
3448	  /* If we have a (addressof (mem ..)), do any instantiation inside
3449	     since we know we'll be making the inside valid when we finally
3450	     remove the ADDRESSOF.  */
3451	  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3452	  return 1;
3453	}
3454      break;
3455
3456    default:
3457      break;
3458    }
3459
3460  /* Scan all subexpressions.  */
3461  fmt = GET_RTX_FORMAT (code);
3462  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3463    if (*fmt == 'e')
3464      {
3465	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3466	  return 0;
3467      }
3468    else if (*fmt == 'E')
3469      for (j = 0; j < XVECLEN (x, i); j++)
3470	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3471					  extra_insns))
3472	  return 0;
3473
3474  return 1;
3475}
3476
3477/* Optimization: assuming this function does not receive nonlocal gotos,
3478   delete the handlers for them, as well as the insns to establish
3479   and disestablish them.  */
3480
3481static void
3482delete_handlers ()
3483{
3484  rtx insn;
3485  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3486    {
3487      /* Delete the handler by turning off the flag that would
3488	 prevent jump_optimize from deleting it.
3489	 Also permit deletion of the nonlocal labels themselves
3490	 if nothing local refers to them.  */
3491      if (GET_CODE (insn) == CODE_LABEL)
3492	{
3493	  tree t, last_t;
3494
3495	  LABEL_PRESERVE_P (insn) = 0;
3496
3497	  /* Remove it from the nonlocal_label list, to avoid confusing
3498	     flow.  */
3499	  for (t = nonlocal_labels, last_t = 0; t;
3500	       last_t = t, t = TREE_CHAIN (t))
3501	    if (DECL_RTL (TREE_VALUE (t)) == insn)
3502	      break;
3503	  if (t)
3504	    {
3505	      if (! last_t)
3506		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3507	      else
3508		TREE_CHAIN (last_t) = TREE_CHAIN (t);
3509	    }
3510	}
3511      if (GET_CODE (insn) == INSN
3512	  && ((nonlocal_goto_handler_slot != 0
3513	       && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3514	      || (nonlocal_goto_stack_level != 0
3515		  && reg_mentioned_p (nonlocal_goto_stack_level,
3516				      PATTERN (insn)))))
3517	delete_insn (insn);
3518    }
3519}
3520
3521/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3522   of the current function.  */
3523
3524rtx
3525nonlocal_label_rtx_list ()
3526{
3527  tree t;
3528  rtx x = 0;
3529
3530  for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3531    x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3532
3533  return x;
3534}
3535
3536/* Output a USE for any register use in RTL.
3537   This is used with -noreg to mark the extent of the lifespan
3538   of any registers used in a user-visible variable's DECL_RTL.  */
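/* For instance, a register variable living in pseudo N gets a
   (use (reg N)) emitted at the end of the chain; for a variable-sized
   structure it is the address register inside the MEM that gets the USE.  */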
3539
3540void
3541use_variable (rtl)
3542     rtx rtl;
3543{
3544  if (GET_CODE (rtl) == REG)
3545    /* This is a register variable.  */
3546    emit_insn (gen_rtx_USE (VOIDmode, rtl));
3547  else if (GET_CODE (rtl) == MEM
3548	   && GET_CODE (XEXP (rtl, 0)) == REG
3549	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3550	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3551	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3552    /* This is a variable-sized structure.  */
3553    emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3554}
3555
3556/* Like use_variable except that it outputs the USEs after INSN
3557   instead of at the end of the insn-chain.  */
3558
3559void
3560use_variable_after (rtl, insn)
3561     rtx rtl, insn;
3562{
3563  if (GET_CODE (rtl) == REG)
3564    /* This is a register variable.  */
3565    emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3566  else if (GET_CODE (rtl) == MEM
3567	   && GET_CODE (XEXP (rtl, 0)) == REG
3568	   && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3569	       || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3570	   && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3571    /* This is a variable-sized structure.  */
3572    emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3573}
3574
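/* Return one more than the highest register number used for a parameter,
   i.e. the length of the parm_reg_stack_loc table.  */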
3575int
3576max_parm_reg_num ()
3577{
3578  return max_parm_reg;
3579}
3580
3581/* Return the first insn following those generated by `assign_parms'.  */
3582
3583rtx
3584get_first_nonparm_insn ()
3585{
3586  if (last_parm_insn)
3587    return NEXT_INSN (last_parm_insn);
3588  return get_insns ();
3589}
3590
3591/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3592   Crash if there is none.  */
3593
3594rtx
3595get_first_block_beg ()
3596{
3597  register rtx searcher;
3598  register rtx insn = get_first_nonparm_insn ();
3599
3600  for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3601    if (GET_CODE (searcher) == NOTE
3602	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3603      return searcher;
3604
3605  abort ();	/* Invalid call to this function.  (See comments above.)  */
3606  return NULL_RTX;
3607}
3608
3609/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3610   This means a type for which function calls must pass an address to the
3611   function or get an address back from the function.
3612   EXP may be a type node or an expression (whose type is tested).  */
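/* For example, with -fpcc-struct-return every aggregate value is forced
   into memory here; otherwise a small struct may still come back in
   registers, provided RETURN_IN_MEMORY does not claim it and suitable
   call-clobbered registers exist.  */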
3613
3614int
3615aggregate_value_p (exp)
3616     tree exp;
3617{
3618  int i, regno, nregs;
3619  rtx reg;
3620  tree type;
3621  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3622    type = exp;
3623  else
3624    type = TREE_TYPE (exp);
3625
3626  if (RETURN_IN_MEMORY (type))
3627    return 1;
3628  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3629     and thus can't be returned in registers.  */
3630  if (TREE_ADDRESSABLE (type))
3631    return 1;
3632  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3633    return 1;
3634  /* Make sure we have suitable call-clobbered regs to return
3635     the value in; if not, we must return it in memory.  */
3636  reg = hard_function_value (type, 0);
3637
3638  /* If we have something other than a REG (e.g. a PARALLEL), then assume
3639     it is OK.  */
3640  if (GET_CODE (reg) != REG)
3641    return 0;
3642
3643  regno = REGNO (reg);
3644  nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3645  for (i = 0; i < nregs; i++)
3646    if (! call_used_regs[regno + i])
3647      return 1;
3648  return 0;
3649}
3650
3651/* Assign RTL expressions to the function's parameters.
3652   This may involve copying them into registers and using
3653   those registers as the RTL for them.
3654
3655   If SECOND_TIME is non-zero it means that this function is being
3656   called a second time.  This is done by integrate.c when a function's
3657   compilation is deferred.  We need to come back here in case the
3658   FUNCTION_ARG macro computes items needed for the rest of the compilation
3659   (such as changing which registers are fixed or caller-saved).  But suppress
3660   writing any insns or setting DECL_RTL of anything in this case.  */
3661
3662void
3663assign_parms (fndecl, second_time)
3664     tree fndecl;
3665     int second_time;
3666{
3667  register tree parm;
3668  register rtx entry_parm = 0;
3669  register rtx stack_parm = 0;
3670  CUMULATIVE_ARGS args_so_far;
3671  enum machine_mode promoted_mode, passed_mode;
3672  enum machine_mode nominal_mode, promoted_nominal_mode;
3673  int unsignedp;
3674  /* Total space needed so far for args on the stack,
3675     given as a constant and a tree-expression.  */
3676  struct args_size stack_args_size;
3677  tree fntype = TREE_TYPE (fndecl);
3678  tree fnargs = DECL_ARGUMENTS (fndecl);
3679  /* This is used for the arg pointer when referring to stack args.  */
3680  rtx internal_arg_pointer;
3681  /* This is a dummy PARM_DECL that we use for the function result if
3682     the function returns a structure.  */
3683  tree function_result_decl = 0;
3684  int varargs_setup = 0;
3685  rtx conversion_insns = 0;
3686
3687  /* Nonzero if the last arg is named `__builtin_va_alist',
3688     which is used on some machines for old-fashioned non-ANSI varargs.h;
3689     this should be stuck onto the stack as if it had arrived there.  */
3690  int hide_last_arg
3691    = (current_function_varargs
3692       && fnargs
3693       && (parm = tree_last (fnargs)) != 0
3694       && DECL_NAME (parm)
3695       && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3696		     "__builtin_va_alist")));
3697
3698  /* Nonzero if the function takes extra anonymous args.
3699     This means the last named arg must be on the stack
3700     right before the anonymous ones.  */
3701  int stdarg
3702    = (TYPE_ARG_TYPES (fntype) != 0
3703       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3704	   != void_type_node));
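  /* E.g. `int f (int a, ...)' ends TYPE_ARG_TYPES with the entry for A
     rather than void_type_node, so STDARG is 1; a fixed-arity prototype
     is terminated by void_type_node and yields 0.  */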
3705
3706  current_function_stdarg = stdarg;
3707
3708  /* If the reg that the virtual arg pointer will be translated into is
3709     not a fixed reg or is the stack pointer, make a copy of the virtual
3710     arg pointer, and address parms via the copy.  The frame pointer is
3711     considered fixed even though it is not marked as such.
3712
3713     The second time through, simply use ap to avoid generating rtx.  */
3714
3715  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3716       || ! (fixed_regs[ARG_POINTER_REGNUM]
3717	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3718      && ! second_time)
3719    internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3720  else
3721    internal_arg_pointer = virtual_incoming_args_rtx;
3722  current_function_internal_arg_pointer = internal_arg_pointer;
3723
3724  stack_args_size.constant = 0;
3725  stack_args_size.var = 0;
3726
3727  /* If struct value address is treated as the first argument, make it so.  */
3728  if (aggregate_value_p (DECL_RESULT (fndecl))
3729      && ! current_function_returns_pcc_struct
3730      && struct_value_incoming_rtx == 0)
3731    {
3732      tree type = build_pointer_type (TREE_TYPE (fntype));
3733
3734      function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3735
3736      DECL_ARG_TYPE (function_result_decl) = type;
3737      TREE_CHAIN (function_result_decl) = fnargs;
3738      fnargs = function_result_decl;
3739    }
3740
3741  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3742  parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3743  bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3744
3745#ifdef INIT_CUMULATIVE_INCOMING_ARGS
3746  INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3747#else
3748  INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3749#endif
3750
3751  /* We haven't yet found an argument that we must push and pretend the
3752     caller did.  */
3753  current_function_pretend_args_size = 0;
3754
3755  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3756    {
3757      int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3758      struct args_size stack_offset;
3759      struct args_size arg_size;
3760      int passed_pointer = 0;
3761      int did_conversion = 0;
3762      tree passed_type = DECL_ARG_TYPE (parm);
3763      tree nominal_type = TREE_TYPE (parm);
3764
3765      /* Set LAST_NAMED if this is the last named arg before some
3766	 anonymous args.  */
3767      int last_named = ((TREE_CHAIN (parm) == 0
3768			 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3769			&& (stdarg || current_function_varargs));
3770      /* Set NAMED_ARG if this arg should be treated as a named arg.  For
3771	 most machines, if this is a varargs/stdarg function, then we treat
3772	 the last named arg as if it were anonymous too.  */
3773      int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3774
3775      if (TREE_TYPE (parm) == error_mark_node
3776	  /* This can happen after weird syntax errors
3777	     or if an enum type is defined among the parms.  */
3778	  || TREE_CODE (parm) != PARM_DECL
3779	  || passed_type == NULL)
3780	{
3781	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3782	    = gen_rtx_MEM (BLKmode, const0_rtx);
3783	  TREE_USED (parm) = 1;
3784	  continue;
3785	}
3786
3787      /* For a varargs.h function, save info about the regs and stack space
3788	 used by the individual args, not including the va_alist arg.  */
3789      if (hide_last_arg && last_named)
3790	current_function_args_info = args_so_far;
3791
3792      /* Find mode of arg as it is passed, and mode of arg
3793	 as it should be during execution of this function.  */
3794      passed_mode = TYPE_MODE (passed_type);
3795      nominal_mode = TYPE_MODE (nominal_type);
3796
3797      /* If the parm's mode is VOID, its value doesn't matter,
3798	 so avoid the usual things like emit_move_insn, which could crash.  */
3799      if (nominal_mode == VOIDmode)
3800	{
3801	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3802	  continue;
3803	}
3804
3805      /* If the parm is to be passed as a transparent union, use the
3806	 type of the first field for the tests below.  We have already
3807	 verified that the modes are the same.  */
3808      if (DECL_TRANSPARENT_UNION (parm)
3809	  || TYPE_TRANSPARENT_UNION (passed_type))
3810	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3811
3812      /* See if this arg was passed by invisible reference.  It is if
3813	 it is an object whose size depends on the contents of the
3814	 object itself or if the machine requires these objects be passed
3815	 that way.  */
3816
3817      if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3818	   && contains_placeholder_p (TYPE_SIZE (passed_type)))
3819	  || TREE_ADDRESSABLE (passed_type)
3820#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3821	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3822					      passed_type, named_arg)
3823#endif
3824	  )
3825	{
3826	  passed_type = nominal_type = build_pointer_type (passed_type);
3827	  passed_pointer = 1;
3828	  passed_mode = nominal_mode = Pmode;
3829	}
3830
3831      promoted_mode = passed_mode;
3832
3833#ifdef PROMOTE_FUNCTION_ARGS
3834      /* Compute the mode to which the arg is actually extended.  */
3835      unsignedp = TREE_UNSIGNED (passed_type);
3836      promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3837#endif
3838
3839      /* Let machine desc say which reg (if any) the parm arrives in.
3840	 0 means it arrives on the stack.  */
3841#ifdef FUNCTION_INCOMING_ARG
3842      entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3843					  passed_type, named_arg);
3844#else
3845      entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3846				 passed_type, named_arg);
3847#endif
3848
3849      if (entry_parm == 0)
3850	promoted_mode = passed_mode;
3851
3852#ifdef SETUP_INCOMING_VARARGS
3853      /* If this is the last named parameter, do any required setup for
3854	 varargs or stdargs.  We need to know about the case of this being an
3855	 addressable type, in which case we skip the registers it
3856	 would have arrived in.
3857
3858	 For stdargs, LAST_NAMED will be set for two parameters, the one that
3859	 is actually the last named, and the dummy parameter.  We only
3860	 want to do this action once.
3861
3862	 Also, indicate when RTL generation is to be suppressed.  */
3863      if (last_named && !varargs_setup)
3864	{
3865	  SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3866				  current_function_pretend_args_size,
3867				  second_time);
3868	  varargs_setup = 1;
3869	}
3870#endif
3871
3872      /* Determine parm's home in the stack,
3873	 in case it arrives in the stack or we should pretend it did.
3874
3875	 Compute the stack position and rtx where the argument arrives
3876	 and its size.
3877
3878	 There is one complexity here:  If this was a parameter that would
3879	 have been passed in registers, but wasn't, only because it is
3880	 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3881	 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3882	 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3883	 0 as it was the previous time.  */
3884
3885      locate_and_pad_parm (promoted_mode, passed_type,
3886#ifdef STACK_PARMS_IN_REG_PARM_AREA
3887			   1,
3888#else
3889#ifdef FUNCTION_INCOMING_ARG
3890			   FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3891						  passed_type,
3892						  (named_arg
3893						   || varargs_setup)) != 0,
3894#else
3895			   FUNCTION_ARG (args_so_far, promoted_mode,
3896					 passed_type,
3897					 named_arg || varargs_setup) != 0,
3898#endif
3899#endif
3900			   fndecl, &stack_args_size, &stack_offset, &arg_size);
3901
3902      if (! second_time)
3903	{
3904	  rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3905
3906	  if (offset_rtx == const0_rtx)
3907	    stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3908	  else
3909	    stack_parm = gen_rtx_MEM (promoted_mode,
3910				      gen_rtx_PLUS (Pmode,
3911						    internal_arg_pointer,
3912						    offset_rtx));
3913
3914	  /* If this is a memory ref that contains aggregate components,
3915	     mark it as such for cse and loop optimize.  Likewise if it
3916	     is readonly.  */
3917	  MEM_IN_STRUCT_P (stack_parm) = aggregate;
3918	  RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3919	  MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
3920	}
3921
3922      /* If this parameter was passed both in registers and in the stack,
3923	 use the copy on the stack.  */
3924      if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3925	entry_parm = 0;
3926
3927#ifdef FUNCTION_ARG_PARTIAL_NREGS
3928      /* If this parm was passed part in regs and part in memory,
3929	 pretend it arrived entirely in memory
3930	 by pushing the register-part onto the stack.
3931
3932	 In the special case of a DImode or DFmode that is split,
3933	 we could put it together in a pseudoreg directly,
3934	 but for now that's not worth bothering with.  */
3935
3936      if (entry_parm)
3937	{
3938	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3939						  passed_type, named_arg);
3940
3941	  if (nregs > 0)
3942	    {
3943	      current_function_pretend_args_size
3944		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3945		   / (PARM_BOUNDARY / BITS_PER_UNIT)
3946		   * (PARM_BOUNDARY / BITS_PER_UNIT));
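	      /* A worked example (all values are target-dependent): with
		 nregs == 3, UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64,
		 the 12 bytes of register-part round up to 16 bytes of
		 pretended stack space.  */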
3947
3948	      if (! second_time)
3949		{
3950		  /* Handle calls that pass values in multiple non-contiguous
3951		     locations.  The Irix 6 ABI has examples of this.  */
3952		  if (GET_CODE (entry_parm) == PARALLEL)
3953		    emit_group_store (validize_mem (stack_parm), entry_parm,
3954				      int_size_in_bytes (TREE_TYPE (parm)),
3955				      (TYPE_ALIGN (TREE_TYPE (parm))
3956				       / BITS_PER_UNIT));
3957		  else
3958		    move_block_from_reg (REGNO (entry_parm),
3959					 validize_mem (stack_parm), nregs,
3960					 int_size_in_bytes (TREE_TYPE (parm)));
3961		}
3962	      entry_parm = stack_parm;
3963	    }
3964	}
3965#endif
3966
3967      /* If we didn't decide this parm came in a register,
3968	 by default it came on the stack.  */
3969      if (entry_parm == 0)
3970	entry_parm = stack_parm;
3971
3972      /* Record permanently how this parm was passed.  */
3973      if (! second_time)
3974	DECL_INCOMING_RTL (parm) = entry_parm;
3975
3976      /* If there is actually space on the stack for this parm,
3977	 count it in stack_args_size; otherwise set stack_parm to 0
3978	 to indicate there is no preallocated stack slot for the parm.  */
3979
3980      if (entry_parm == stack_parm
3981#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3982	  /* On some machines, even if a parm value arrives in a register
3983	     there is still an (uninitialized) stack slot allocated for it.
3984
3985	     ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3986	     whether this parameter already has a stack slot allocated,
3987	     because an arg block exists only if current_function_args_size
3988	     is larger than some threshold, and we haven't calculated that
3989	     yet.  So, for now, we just assume that stack slots never exist
3990	     in this case.  */
3991	  || REG_PARM_STACK_SPACE (fndecl) > 0
3992#endif
3993	  )
3994	{
3995	  stack_args_size.constant += arg_size.constant;
3996	  if (arg_size.var)
3997	    ADD_PARM_SIZE (stack_args_size, arg_size.var);
3998	}
3999      else
4000	/* No stack slot was pushed for this parm.  */
4001	stack_parm = 0;
4002
4003      /* Update info on where next arg arrives in registers.  */
4004
4005      FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4006			    passed_type, named_arg);
4007
4008      /* If this is our second time through, we are done with this parm.  */
4009      if (second_time)
4010	continue;
4011
4012      /* If we can't trust the parm stack slot to be aligned enough
4013	 for its ultimate type, don't use that slot after entry.
4014	 We'll make another stack slot, if we need one.  */
4015      {
4016	int thisparm_boundary
4017	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4018
4019	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4020	  stack_parm = 0;
4021      }
4022
4023      /* If parm was passed in memory, and we need to convert it on entry,
4024	 don't store it back in that same slot.  */
4025      if (entry_parm != 0
4026	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
4027	stack_parm = 0;
4028
4029#if 0
4030      /* Now adjust STACK_PARM to the mode and precise location
4031	 where this parameter should live during execution,
4032	 if we discover that it must live in the stack during execution.
4033	 To make debuggers happier on big-endian machines, we store
4034	 the value in the last bytes of the space available.  */
4035
4036      if (nominal_mode != BLKmode && nominal_mode != passed_mode
4037	  && stack_parm != 0)
4038	{
4039	  rtx offset_rtx;
4040
4041	  if (BYTES_BIG_ENDIAN
4042	      && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4043	    stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4044				      - GET_MODE_SIZE (nominal_mode));
4045
4046	  offset_rtx = ARGS_SIZE_RTX (stack_offset);
4047	  if (offset_rtx == const0_rtx)
4048	    stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4049	  else
4050	    stack_parm = gen_rtx_MEM (nominal_mode,
4051				      gen_rtx_PLUS (Pmode,
4052						    internal_arg_pointer,
4053						    offset_rtx));
4054
4055	  /* If this is a memory ref that contains aggregate components,
4056	     mark it as such for cse and loop optimize.  */
4057	  MEM_IN_STRUCT_P (stack_parm) = aggregate;
4058	}
4059#endif /* 0 */
4060
4061#ifdef STACK_REGS
4062      /* We need this "use" info, because the gcc-register->stack-register
4063	 converter in reg-stack.c needs to know which registers are active
4064	 at the start of the function call.  The actual parameter loading
4065	 instructions may no longer be available at that point, since they
4066	 might have been optimized away.  */
4067
4068      if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4069	  emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4070#endif
4071
4072      /* ENTRY_PARM is an RTX for the parameter as it arrives,
4073	 in the mode in which it arrives.
4074	 STACK_PARM is an RTX for a stack slot where the parameter can live
4075	 during the function (in case we want to put it there).
4076	 STACK_PARM is 0 if no stack slot was pushed for it.
4077
4078	 Now output code if necessary to convert ENTRY_PARM to
4079	 the type in which this function declares it,
4080	 and store that result in an appropriate place,
4081	 which may be a pseudo reg, may be STACK_PARM,
4082	 or may be a local stack slot if STACK_PARM is 0.
4083
4084	 Set DECL_RTL to that place.  */
4085
4086      if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4087	{
4088	  /* If a BLKmode arrives in registers, copy it to a stack slot.
4089	     Handle calls that pass values in multiple non-contiguous
4090	     locations.  The Irix 6 ABI has examples of this.  */
4091	  if (GET_CODE (entry_parm) == REG
4092	      || GET_CODE (entry_parm) == PARALLEL)
4093	    {
4094	      int size_stored
4095		= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4096			      UNITS_PER_WORD);
4097
4098	      /* Note that we will be storing an integral number of words.
4099		 So we have to be careful to ensure that we allocate an
4100		 integral number of words.  We do this below in the call to
4101		 assign_stack_local if space was not allocated in the argument
4102		 list.  If it was, this will not work if PARM_BOUNDARY is not
4103		 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
4104		 if it becomes a problem.  */
4105
4106	      if (stack_parm == 0)
4107		{
4108		  stack_parm
4109		    = assign_stack_local (GET_MODE (entry_parm),
4110					  size_stored, 0);
4111
4112		  /* If this is a memory ref that contains aggregate
4113		     components, mark it as such for cse and loop optimize.  */
4114		  MEM_IN_STRUCT_P (stack_parm) = aggregate;
4115		}
4116
4117	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4118		abort ();
4119
4120	      if (TREE_READONLY (parm))
4121		RTX_UNCHANGING_P (stack_parm) = 1;
4122
4123	      /* Handle calls that pass values in multiple non-contiguous
4124		 locations.  The Irix 6 ABI has examples of this.  */
4125	      if (GET_CODE (entry_parm) == PARALLEL)
4126		emit_group_store (validize_mem (stack_parm), entry_parm,
4127				  int_size_in_bytes (TREE_TYPE (parm)),
4128				  (TYPE_ALIGN (TREE_TYPE (parm))
4129				   / BITS_PER_UNIT));
4130	      else
4131		move_block_from_reg (REGNO (entry_parm),
4132				     validize_mem (stack_parm),
4133				     size_stored / UNITS_PER_WORD,
4134				     int_size_in_bytes (TREE_TYPE (parm)));
4135	    }
4136	  DECL_RTL (parm) = stack_parm;
4137	}
4138      else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4139		   && ! DECL_INLINE (fndecl))
4140		  /* layout_decl may set this.  */
4141		  || TREE_ADDRESSABLE (parm)
4142		  || TREE_SIDE_EFFECTS (parm)
4143		  /* If -ffloat-store specified, don't put explicit
4144		     float variables into registers.  */
4145		  || (flag_float_store
4146		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4147	       /* Always assign pseudo to structure return or item passed
4148		  by invisible reference.  */
4149	       || passed_pointer || parm == function_result_decl)
4150	{
4151	  /* Store the parm in a pseudoregister during the function, but we
4152	     may need to do it in a wider mode.  */
4153
4154	  register rtx parmreg;
4155	  int regno, regnoi = 0, regnor = 0;
4156
4157	  unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4158
4159	  promoted_nominal_mode
4160	    = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4161
4162	  parmreg = gen_reg_rtx (promoted_nominal_mode);
4163	  mark_user_reg (parmreg);
4164
4165	  /* If this was an item that we received a pointer to, set DECL_RTL
4166	     appropriately.  */
4167	  if (passed_pointer)
4168	    {
4169	      DECL_RTL (parm)
4170		= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4171	      MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4172	    }
4173	  else
4174	    DECL_RTL (parm) = parmreg;
4175
4176	  /* Copy the value into the register.  */
4177	  if (nominal_mode != passed_mode
4178	      || promoted_nominal_mode != promoted_mode)
4179	    {
4180	      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4181		 mode, by the caller.  We now have to convert it to
4182		 NOMINAL_MODE, if different.  However, PARMREG may be in
4183		 a different mode than NOMINAL_MODE if it is being stored
4184		 promoted.
4185
4186		 If ENTRY_PARM is a hard register, it might be in a register
4187		 not valid for operating in its mode (e.g., an odd-numbered
4188		 register for a DFmode).  In that case, moves are the only
4189		 thing valid, so we can't do a convert from there.  This
4190		 occurs when the calling sequence allows such misaligned
4191		 usages.
4192
4193		 In addition, the conversion may involve a call, which could
4194		 clobber parameters which haven't been copied to pseudo
4195		 registers yet.  Therefore, we must first copy the parm to
4196		 a pseudo reg here, and save the conversion until after all
4197		 parameters have been moved.  */
4198
4199	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4200
4201	      emit_move_insn (tempreg, validize_mem (entry_parm));
4202
4203	      push_to_sequence (conversion_insns);
4204	      tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4205
4206	      expand_assignment (parm,
4207				 make_tree (nominal_type, tempreg), 0, 0);
4208	      conversion_insns = get_insns ();
4209	      did_conversion = 1;
4210	      end_sequence ();
4211	    }
4212	  else
4213	    emit_move_insn (parmreg, validize_mem (entry_parm));
4214
4215	  /* If we were passed a pointer but the actual value
4216	     can safely live in a register, put it in one.  */
4217	  if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4218	      && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4219		     && ! DECL_INLINE (fndecl))
4220		    /* layout_decl may set this.  */
4221		    || TREE_ADDRESSABLE (parm)
4222		    || TREE_SIDE_EFFECTS (parm)
4223		    /* If -ffloat-store specified, don't put explicit
4224		       float variables into registers.  */
4225		    || (flag_float_store
4226			&& TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4227	    {
4228	      /* We can't use nominal_mode, because it will have been set to
4229		 Pmode above.  We must use the actual mode of the parm.  */
4230	      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4231	      mark_user_reg (parmreg);
4232	      emit_move_insn (parmreg, DECL_RTL (parm));
4233	      DECL_RTL (parm) = parmreg;
4234	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
4235		 now the parm.  */
4236	      stack_parm = 0;
4237	    }
4238#ifdef FUNCTION_ARG_CALLEE_COPIES
4239	  /* If we are passed an arg by reference and it is our responsibility
4240	     to make a copy, do it now.
4241	     PASSED_TYPE and PASSED mode now refer to the pointer, not the
4242	     original argument, so we must recreate them in the call to
4243	     FUNCTION_ARG_CALLEE_COPIES.  */
4244	  /* ??? Later add code to skip the copy when the argument
4245	     isn't modified.  */
4246
4247	  else if (passed_pointer
4248		   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4249						  TYPE_MODE (DECL_ARG_TYPE (parm)),
4250						  DECL_ARG_TYPE (parm),
4251						  named_arg)
4252		   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4253	    {
4254	      rtx copy;
4255	      tree type = DECL_ARG_TYPE (parm);
4256
4257	      /* This sequence may involve a library call perhaps clobbering
4258		 registers that haven't been copied to pseudos yet.  */
4259
4260	      push_to_sequence (conversion_insns);
4261
4262	      if (TYPE_SIZE (type) == 0
4263		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4264		/* This is a variable sized object.  */
4265		copy = gen_rtx_MEM (BLKmode,
4266				    allocate_dynamic_stack_space
4267				    (expr_size (parm), NULL_RTX,
4268				     TYPE_ALIGN (type)));
4269	      else
4270		copy = assign_stack_temp (TYPE_MODE (type),
4271					  int_size_in_bytes (type), 1);
4272	      MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4273	      RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4274
4275	      store_expr (parm, copy, 0);
4276	      emit_move_insn (parmreg, XEXP (copy, 0));
4277	      if (flag_check_memory_usage)
4278		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4279				   XEXP (copy, 0), ptr_mode,
4280				   GEN_INT (int_size_in_bytes (type)),
4281				   TYPE_MODE (sizetype),
4282				   GEN_INT (MEMORY_USE_RW),
4283				   TYPE_MODE (integer_type_node));
4284	      conversion_insns = get_insns ();
4285	      did_conversion = 1;
4286	      end_sequence ();
4287	    }
4288#endif /* FUNCTION_ARG_CALLEE_COPIES */
4289
4290	  /* In any case, record the parm's desired stack location
4291	     in case we later discover it must live in the stack.
4292
4293	     If it is a COMPLEX value, store the stack location for both
4294	     halves.  */
4295
4296	  if (GET_CODE (parmreg) == CONCAT)
4297	    regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4298	  else
4299	    regno = REGNO (parmreg);
4300
4301	  if (regno >= max_parm_reg)
4302	    {
4303	      rtx *new;
4304	      int old_max_parm_reg = max_parm_reg;
4305
4306	      /* It's slow to expand this one register at a time,
4307		 but it's also rare and we need max_parm_reg to be
4308		 precisely correct.  */
4309	      max_parm_reg = regno + 1;
4310	      new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4311	      bcopy ((char *) parm_reg_stack_loc, (char *) new,
4312		     old_max_parm_reg * sizeof (rtx));
4313	      bzero ((char *) (new + old_max_parm_reg),
4314		     (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4315	      parm_reg_stack_loc = new;
4316	    }
4317
4318	  if (GET_CODE (parmreg) == CONCAT)
4319	    {
4320	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4321
4322	      regnor = REGNO (gen_realpart (submode, parmreg));
4323	      regnoi = REGNO (gen_imagpart (submode, parmreg));
4324
4325	      if (stack_parm != 0)
4326		{
4327		  parm_reg_stack_loc[regnor]
4328		    = gen_realpart (submode, stack_parm);
4329		  parm_reg_stack_loc[regnoi]
4330		    = gen_imagpart (submode, stack_parm);
4331		}
4332	      else
4333		{
4334		  parm_reg_stack_loc[regnor] = 0;
4335		  parm_reg_stack_loc[regnoi] = 0;
4336		}
4337	    }
4338	  else
4339	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4340
4341	  /* Mark the register as eliminable if we did no conversion
4342	     and it was copied from memory at a fixed offset,
4343	     and the arg pointer was not copied to a pseudo-reg.
4344	     If the arg pointer is a pseudo reg or the offset formed
4345	     an invalid address, such memory-equivalences
4346	     as we make here would screw up life analysis for it.  */
4347	  if (nominal_mode == passed_mode
4348	      && ! did_conversion
4349	      && stack_parm != 0
4350	      && GET_CODE (stack_parm) == MEM
4351	      && stack_offset.var == 0
4352	      && reg_mentioned_p (virtual_incoming_args_rtx,
4353				  XEXP (stack_parm, 0)))
4354	    {
4355	      rtx linsn = get_last_insn ();
4356	      rtx sinsn, set;
4357
4358	      /* Mark complex types separately.  */
4359	      if (GET_CODE (parmreg) == CONCAT)
4360		/* Scan backwards for the set of the real and
4361		   imaginary parts.  */
4362		for (sinsn = linsn; sinsn != 0;
4363		     sinsn = prev_nonnote_insn (sinsn))
4364		  {
4365		    set = single_set (sinsn);
4366		    if (set != 0
4367			&& SET_DEST (set) == regno_reg_rtx [regnoi])
4368		      REG_NOTES (sinsn)
4369			= gen_rtx_EXPR_LIST (REG_EQUIV,
4370					     parm_reg_stack_loc[regnoi],
4371					     REG_NOTES (sinsn));
4372		    else if (set != 0
4373			     && SET_DEST (set) == regno_reg_rtx [regnor])
4374		      REG_NOTES (sinsn)
4375			= gen_rtx_EXPR_LIST (REG_EQUIV,
4376					     parm_reg_stack_loc[regnor],
4377					     REG_NOTES (sinsn));
4378		  }
4379	      else if ((set = single_set (linsn)) != 0
4380		       && SET_DEST (set) == parmreg)
4381	        REG_NOTES (linsn)
4382		  = gen_rtx_EXPR_LIST (REG_EQUIV,
4383				       stack_parm, REG_NOTES (linsn));
4384	    }
4385
4386	  /* For pointer data type, suggest pointer register.  */
4387	  if (POINTER_TYPE_P (TREE_TYPE (parm)))
4388	    mark_reg_pointer (parmreg,
4389			      (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4390			       / BITS_PER_UNIT));
4391	}
4392      else
4393	{
4394	  /* Value must be stored in the stack slot STACK_PARM
4395	     during function execution.  */
4396
4397	  if (promoted_mode != nominal_mode)
4398	    {
4399	      /* Conversion is required.  */
4400	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4401
4402	      emit_move_insn (tempreg, validize_mem (entry_parm));
4403
4404	      push_to_sequence (conversion_insns);
4405	      entry_parm = convert_to_mode (nominal_mode, tempreg,
4406					    TREE_UNSIGNED (TREE_TYPE (parm)));
4407	      if (stack_parm)
4408		{
4409		  /* ??? This may need a big-endian conversion on sparc64.  */
4410		  stack_parm = change_address (stack_parm, nominal_mode,
4411					       NULL_RTX);
4412		}
4413	      conversion_insns = get_insns ();
4414	      did_conversion = 1;
4415	      end_sequence ();
4416	    }
4417
4418	  if (entry_parm != stack_parm)
4419	    {
4420	      if (stack_parm == 0)
4421		{
4422		  stack_parm
4423		    = assign_stack_local (GET_MODE (entry_parm),
4424					  GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4425		  /* If this is a memory ref that contains aggregate components,
4426		     mark it as such for cse and loop optimize.  */
4427		  MEM_IN_STRUCT_P (stack_parm) = aggregate;
4428		}
4429
4430	      if (promoted_mode != nominal_mode)
4431		{
4432		  push_to_sequence (conversion_insns);
4433		  emit_move_insn (validize_mem (stack_parm),
4434				  validize_mem (entry_parm));
4435		  conversion_insns = get_insns ();
4436		  end_sequence ();
4437		}
4438	      else
4439		emit_move_insn (validize_mem (stack_parm),
4440				validize_mem (entry_parm));
4441	    }
4442	  if (flag_check_memory_usage)
4443	    {
4444	      push_to_sequence (conversion_insns);
4445	      emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4446				 XEXP (stack_parm, 0), ptr_mode,
4447				 GEN_INT (GET_MODE_SIZE (GET_MODE
4448							 (entry_parm))),
4449				 TYPE_MODE (sizetype),
4450				 GEN_INT (MEMORY_USE_RW),
4451				 TYPE_MODE (integer_type_node));
4452
4453	      conversion_insns = get_insns ();
4454	      end_sequence ();
4455	    }
4456	  DECL_RTL (parm) = stack_parm;
4457	}
4458
4459      /* If this "parameter" was the place where we are receiving the
4460	 function's incoming structure pointer, set up the result.  */
4461      if (parm == function_result_decl)
4462	{
4463	  tree result = DECL_RESULT (fndecl);
4464	  tree restype = TREE_TYPE (result);
4465
4466	  DECL_RTL (result)
4467	    = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4468
4469	  MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4470	}
4471
4472      if (TREE_THIS_VOLATILE (parm))
4473	MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4474      if (TREE_READONLY (parm))
4475	RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4476    }
4477
4478  /* Output all parameter conversion instructions (possibly including calls)
4479     now that all parameters have been copied out of hard registers.  */
4480  emit_insns (conversion_insns);
4481
4482  last_parm_insn = get_last_insn ();
4483
4484  current_function_args_size = stack_args_size.constant;
4485
4486  /* Adjust function incoming argument size for alignment and
4487     minimum length.  */
4488
4489#ifdef REG_PARM_STACK_SPACE
4490#ifndef MAYBE_REG_PARM_STACK_SPACE
4491  current_function_args_size = MAX (current_function_args_size,
4492				    REG_PARM_STACK_SPACE (fndecl));
4493#endif
4494#endif
4495
4496#ifdef STACK_BOUNDARY
4497#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4498
4499  current_function_args_size
4500    = ((current_function_args_size + STACK_BYTES - 1)
4501       / STACK_BYTES) * STACK_BYTES;
4502#endif
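  /* E.g. with STACK_BOUNDARY == 64, STACK_BYTES is 8, so an args size of
     13 bytes is rounded up to 16 here; both values are target-specific.  */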
4503
4504#ifdef ARGS_GROW_DOWNWARD
4505  current_function_arg_offset_rtx
4506    = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4507       : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4508				  size_int (-stack_args_size.constant)),
4509		      NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4510#else
4511  current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4512#endif
4513
4514  /* See how many bytes, if any, of its args a function should try to pop
4515     on return.  */
4516
4517  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4518						 current_function_args_size);
4519
4520  /* For a stdarg.h function, save info about
4521     the regs and stack space used by the named args.  */
4522
4523  if (!hide_last_arg)
4524    current_function_args_info = args_so_far;
4525
4526  /* Set the rtx used for the function return value.  Put this in its
4527     own variable so any optimizers that need this information don't have
4528     to include tree.h.  Do this here so it gets done when an inlined
4529     function gets output.  */
4530
4531  current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4532}
4533
4534/* Indicate whether REGNO is an incoming argument to the current function
4535   that was promoted to a wider mode.  If so, return the RTX for the
4536   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
4537   that REGNO is promoted from and whether the promotion was signed or
4538   unsigned.  */
4539
4540#ifdef PROMOTE_FUNCTION_ARGS
4541
4542rtx
4543promoted_input_arg (regno, pmode, punsignedp)
4544     int regno;
4545     enum machine_mode *pmode;
4546     int *punsignedp;
4547{
4548  tree arg;
4549
4550  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4551       arg = TREE_CHAIN (arg))
4552    if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4553	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
4554	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4555      {
4556	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4557	int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4558
4559	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4560	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4561	    && mode != DECL_MODE (arg))
4562	  {
4563	    *pmode = DECL_MODE (arg);
4564	    *punsignedp = unsignedp;
4565	    return DECL_INCOMING_RTL (arg);
4566	  }
4567      }
4568
4569  return 0;
4570}
4571
4572#endif
4573
4574/* Compute the size and offset from the start of the stacked arguments for a
4575   parm passed in mode PASSED_MODE and with type TYPE.
4576
4577   INITIAL_OFFSET_PTR points to the current offset into the stacked
4578   arguments.
4579
4580   The starting offset and size for this parm are returned in *OFFSET_PTR
4581   and *ARG_SIZE_PTR, respectively.
4582
4583   IN_REGS is non-zero if the argument will be passed in registers.  It will
4584   never be set if REG_PARM_STACK_SPACE is not defined.
4585
4586   FNDECL is the function in which the argument was defined.
4587
4588   There are two types of rounding that are done.  The first, controlled by
4589   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4590   list to be aligned to the specific boundary (in bits).  This rounding
4591   list to be aligned to the specified boundary (in bits).  This rounding
4592
4593   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4594   optionally rounds the size of the parm to PARM_BOUNDARY.  The
4595   initial offset is not affected by this rounding, while the size always
4596   is and the starting offset may be.  */
4597
4598/* OFFSET_PTR will be negative in the ARGS_GROW_DOWNWARD case;
4599   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4600   callers pass in the total size of args so far as
4601   INITIAL_OFFSET_PTR.  ARG_SIZE_PTR is always positive.  */
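/* As an illustration (a sketch; every value here is target-dependent):
   in the args-grow-upward case, with *INITIAL_OFFSET_PTR == 6, a 32-bit
   FUNCTION_ARG_BOUNDARY and a 4-byte parm, the offset is first aligned
   up to 8, so *OFFSET_PTR ends up 8 and *ARG_SIZE_PTR 4.  */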
4602
4603void
4604locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4605		     initial_offset_ptr, offset_ptr, arg_size_ptr)
4606     enum machine_mode passed_mode;
4607     tree type;
4608     int in_regs;
4609     tree fndecl;
4610     struct args_size *initial_offset_ptr;
4611     struct args_size *offset_ptr;
4612     struct args_size *arg_size_ptr;
4613{
4614  tree sizetree
4615    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4616  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4617  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4618
4619#ifdef REG_PARM_STACK_SPACE
4620  /* If we have found a stack parm before we reach the end of the
4621     area reserved for registers, skip that area.  */
4622  if (! in_regs)
4623    {
4624      int reg_parm_stack_space = 0;
4625
4626#ifdef MAYBE_REG_PARM_STACK_SPACE
4627      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4628#else
4629      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4630#endif
4631      if (reg_parm_stack_space > 0)
4632	{
4633	  if (initial_offset_ptr->var)
4634	    {
4635	      initial_offset_ptr->var
4636		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4637			      size_int (reg_parm_stack_space));
4638	      initial_offset_ptr->constant = 0;
4639	    }
4640	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
4641	    initial_offset_ptr->constant = reg_parm_stack_space;
4642	}
4643    }
4644#endif /* REG_PARM_STACK_SPACE */
4645
4646  arg_size_ptr->var = 0;
4647  arg_size_ptr->constant = 0;
4648
4649#ifdef ARGS_GROW_DOWNWARD
4650  if (initial_offset_ptr->var)
4651    {
4652      offset_ptr->constant = 0;
4653      offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4654				    initial_offset_ptr->var);
4655    }
4656  else
4657    {
4658      offset_ptr->constant = - initial_offset_ptr->constant;
4659      offset_ptr->var = 0;
4660    }
4661  if (where_pad != none
4662      && (TREE_CODE (sizetree) != INTEGER_CST
4663	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4664    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4665  SUB_PARM_SIZE (*offset_ptr, sizetree);
4666  if (where_pad != downward)
4667    pad_to_arg_alignment (offset_ptr, boundary);
4668  if (initial_offset_ptr->var)
4669    {
4670      arg_size_ptr->var = size_binop (MINUS_EXPR,
4671				      size_binop (MINUS_EXPR,
4672						  integer_zero_node,
4673						  initial_offset_ptr->var),
4674				      offset_ptr->var);
4675    }
4676  else
4677    {
4678      arg_size_ptr->constant = (- initial_offset_ptr->constant
4679				- offset_ptr->constant);
4680    }
4681#else /* !ARGS_GROW_DOWNWARD */
4682  pad_to_arg_alignment (initial_offset_ptr, boundary);
4683  *offset_ptr = *initial_offset_ptr;
4684
4685#ifdef PUSH_ROUNDING
4686  if (passed_mode != BLKmode)
4687    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4688#endif
4689
4690  /* Pad_below needs the pre-rounded size to know how much to pad below,
4691     so this must be done before rounding up.  */
4692  if (where_pad == downward
4693    /* However, BLKmode args passed in regs have their padding done elsewhere.
4694       The stack slot must be able to hold the entire register.  */
4695      && !(in_regs && passed_mode == BLKmode))
4696    pad_below (offset_ptr, passed_mode, sizetree);
4697
4698  if (where_pad != none
4699      && (TREE_CODE (sizetree) != INTEGER_CST
4700	  || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4701    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4702
4703  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4704#endif /* ARGS_GROW_DOWNWARD */
4705}
4706
4707/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4708   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
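/* E.g. a constant offset of 13 with a 32-bit boundary becomes
   CEIL_ROUND (13, 4) == 16 when args grow upward, and an offset of -13
   becomes FLOOR_ROUND (-13, 4) == -16 when they grow downward, so the
   slot still starts on the boundary.  */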
4709
4710static void
4711pad_to_arg_alignment (offset_ptr, boundary)
4712     struct args_size *offset_ptr;
4713     int boundary;
4714{
4715  int boundary_in_bytes = boundary / BITS_PER_UNIT;
4716
4717  if (boundary > BITS_PER_UNIT)
4718    {
4719      if (offset_ptr->var)
4720	{
4721	  offset_ptr->var  =
4722#ifdef ARGS_GROW_DOWNWARD
4723	    round_down
4724#else
4725	    round_up
4726#endif
4727	      (ARGS_SIZE_TREE (*offset_ptr),
4728	       boundary / BITS_PER_UNIT);
4729	  offset_ptr->constant = 0; /*?*/
4730	}
4731      else
4732	offset_ptr->constant =
4733#ifdef ARGS_GROW_DOWNWARD
4734	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4735#else
4736	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4737#endif
4738    }
4739}
4740
4741#ifndef ARGS_GROW_DOWNWARD
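/* Adjust *OFFSET_PTR for padding below the data: add the slot size (the
   parm size rounded up to PARM_BOUNDARY) and subtract the parm's own
   size, so the value ends up at the top of its slot.  */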
4742static void
4743pad_below (offset_ptr, passed_mode, sizetree)
4744     struct args_size *offset_ptr;
4745     enum machine_mode passed_mode;
4746     tree sizetree;
4747{
4748  if (passed_mode != BLKmode)
4749    {
4750      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4751	offset_ptr->constant
4752	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4753	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4754	      - GET_MODE_SIZE (passed_mode));
4755    }
4756  else
4757    {
4758      if (TREE_CODE (sizetree) != INTEGER_CST
4759	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4760	{
4761	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
4762	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4763	  /* Add it in.  */
4764	  ADD_PARM_SIZE (*offset_ptr, s2);
4765	  SUB_PARM_SIZE (*offset_ptr, sizetree);
4766	}
4767    }
4768}
4769#endif
4770
4771#ifdef ARGS_GROW_DOWNWARD
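/* Return VALUE, a size tree, rounded down to a multiple of DIVISOR,
   computed as floor (VALUE / DIVISOR) * DIVISOR; e.g. a VALUE of 13 with
   DIVISOR 4 yields 12.  */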
4772static tree
4773round_down (value, divisor)
4774     tree value;
4775     int divisor;
4776{
4777  return size_binop (MULT_EXPR,
4778		     size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4779		     size_int (divisor));
4780}
4781#endif
4782
4783/* Walk the tree of blocks describing the binding levels within a function
4784   and warn about uninitialized variables.
4785   This is done after calling flow_analysis and before global_alloc
4786   clobbers the pseudo-regs to hard regs.  */
4787
4788void
4789uninitialized_vars_warning (block)
4790     tree block;
4791{
4792  register tree decl, sub;
4793  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4794    {
4795      if (TREE_CODE (decl) == VAR_DECL
4796	  /* These warnings are unreliable for aggregates
4797	     because assigning the fields one by one can fail to convince
4798	     flow.c that the entire aggregate was initialized.
4799	     Unions are troublesome because members may be shorter.  */
4800	  && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4801	  && DECL_RTL (decl) != 0
4802	  && GET_CODE (DECL_RTL (decl)) == REG
4803	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
4804	warning_with_decl (decl,
4805			   "`%s' might be used uninitialized in this function");
4806      if (TREE_CODE (decl) == VAR_DECL
4807	  && DECL_RTL (decl) != 0
4808	  && GET_CODE (DECL_RTL (decl)) == REG
4809	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4810	warning_with_decl (decl,
4811			   "variable `%s' might be clobbered by `longjmp' or `vfork'");
4812    }
4813  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4814    uninitialized_vars_warning (sub);
4815}
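
/* As an illustration (hypothetical source, not from this file): with
   optimization and -Wuninitialized, code along these lines typically
   draws the first warning above:

	int f (int c) { int x; if (c) x = 1; return x; }

   since flow analysis cannot prove that X is set on every path to the
   return.  */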
4816
4817/* Do the appropriate part of uninitialized_vars_warning
4818   but for arguments instead of local variables.  */
4819
4820void
4821setjmp_args_warning ()
4822{
4823  register tree decl;
4824  for (decl = DECL_ARGUMENTS (current_function_decl);
4825       decl; decl = TREE_CHAIN (decl))
4826    if (DECL_RTL (decl) != 0
4827	&& GET_CODE (DECL_RTL (decl)) == REG
4828	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4829      warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4830}
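
/* As an illustration (hypothetical source): an argument that stays in
   a register across a setjmp may not survive a longjmp, since longjmp
   can bring registers back to their setjmp-time contents:

	int f (int arg) { jmp_buf b; if (setjmp (b)) return arg; ... }

   Here ARG is what the warning above would name.  */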
4831
4832/* If this function calls setjmp, put all vars into the stack
4833   unless they were declared `register'.  */
4834
4835void
4836setjmp_protect (block)
4837     tree block;
4838{
4839  register tree decl, sub;
4840  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4841    if ((TREE_CODE (decl) == VAR_DECL
4842	 || TREE_CODE (decl) == PARM_DECL)
4843	&& DECL_RTL (decl) != 0
4844	&& (GET_CODE (DECL_RTL (decl)) == REG
4845	    || (GET_CODE (DECL_RTL (decl)) == MEM
4846		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4847	/* If this variable came from an inline function, it must be
4848	   that its life doesn't overlap the setjmp.  If there was a
4849	   setjmp in the function, it would already be in memory.  We
4850	   must exclude such variables because their DECL_RTL might be
4851	   set to strange things such as virtual_stack_vars_rtx.  */
4852	&& ! DECL_FROM_INLINE (decl)
4853	&& (
4854#ifdef NON_SAVING_SETJMP
4855	    /* If longjmp doesn't restore the registers,
4856	       don't put anything in them.  */
4857	    NON_SAVING_SETJMP
4858	    ||
4859#endif
4860	    ! DECL_REGISTER (decl)))
4861      put_var_into_stack (decl);
4862  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4863    setjmp_protect (sub);
4864}
4865
4866/* Like the previous function, but for args instead of local variables.  */
4867
4868void
4869setjmp_protect_args ()
4870{
4871  register tree decl;
4872  for (decl = DECL_ARGUMENTS (current_function_decl);
4873       decl; decl = TREE_CHAIN (decl))
4874    if ((TREE_CODE (decl) == VAR_DECL
4875	 || TREE_CODE (decl) == PARM_DECL)
4876	&& DECL_RTL (decl) != 0
4877	&& (GET_CODE (DECL_RTL (decl)) == REG
4878	    || (GET_CODE (DECL_RTL (decl)) == MEM
4879		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4880	&& (
4881	    /* If longjmp doesn't restore the registers,
4882	       don't put anything in them.  */
4883#ifdef NON_SAVING_SETJMP
4884	    NON_SAVING_SETJMP
4885	    ||
4886#endif
4887	    ! DECL_REGISTER (decl)))
4888      put_var_into_stack (decl);
4889}
4890
4891/* Return the context-pointer register corresponding to DECL,
4892   or 0 if it does not need one.  */
4893
4894rtx
4895lookup_static_chain (decl)
4896     tree decl;
4897{
4898  tree context = decl_function_context (decl);
4899  tree link;
4900
4901  if (context == 0
4902      || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4903    return 0;
4904
4905  /* We treat inline_function_decl as an alias for the current function
4906     because that is the inline function whose vars, types, etc.
4907     are being merged into the current function.
4908     See expand_inline_function.  */
4909  if (context == current_function_decl || context == inline_function_decl)
4910    return virtual_stack_vars_rtx;
4911
4912  for (link = context_display; link; link = TREE_CHAIN (link))
4913    if (TREE_PURPOSE (link) == context)
4914      return RTL_EXPR_RTL (TREE_VALUE (link));
4915
4916  abort ();
4917}
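
/* For example, with a GNU C nested function (hypothetical source):

	void outer () { int x; void inner () { x++; } inner (); }

   while expanding INNER, a reference to X has OUTER as its context,
   so the static chain (a pointer into OUTER's frame) is what this
   function returns.  */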
4918
4919/* Convert a stack slot address ADDR for variable VAR
4920   (from a containing function)
4921   into an address valid in this function (using a static chain).  */
4922
4923rtx
4924fix_lexical_addr (addr, var)
4925     rtx addr;
4926     tree var;
4927{
4928  rtx basereg;
4929  HOST_WIDE_INT displacement;
4930  tree context = decl_function_context (var);
4931  struct function *fp;
4932  rtx base = 0;
4933
4934  /* If this is the present function, we need not do anything.  */
4935  if (context == current_function_decl || context == inline_function_decl)
4936    return addr;
4937
4938  for (fp = outer_function_chain; fp; fp = fp->next)
4939    if (fp->decl == context)
4940      break;
4941
4942  if (fp == 0)
4943    abort ();
4944
4945  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4946    addr = XEXP (XEXP (addr, 0), 0);
4947
4948  /* Decode given address as base reg plus displacement.  */
4949  if (GET_CODE (addr) == REG)
4950    basereg = addr, displacement = 0;
4951  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4952    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4953  else
4954    abort ();
4955
4956  /* We accept vars reached via the containing function's
4957     incoming arg pointer and via its stack variables pointer.  */
4958  if (basereg == fp->internal_arg_pointer)
4959    {
4960      /* If reached via arg pointer, get the arg pointer value
4961	 out of that function's stack frame.
4962
4963	 There are two cases:  If a separate ap is needed, allocate a
4964	 slot in the outer function for it and dereference it that way.
4965	 This is correct even if the real ap is actually a pseudo.
4966	 Otherwise, just adjust the offset from the frame pointer to
4967	 compensate.  */
4968
4969#ifdef NEED_SEPARATE_AP
4970      rtx addr;
4971
4972      if (fp->arg_pointer_save_area == 0)
4973	fp->arg_pointer_save_area
4974	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4975
4976      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4977      addr = memory_address (Pmode, addr);
4978
4979      base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
4980#else
4981      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4982      base = lookup_static_chain (var);
4983#endif
4984    }
4985
4986  else if (basereg == virtual_stack_vars_rtx)
4987    {
4988      /* This is the same code as lookup_static_chain, duplicated here to
4989	 avoid an extra call to decl_function_context.  */
4990      tree link;
4991
4992      for (link = context_display; link; link = TREE_CHAIN (link))
4993	if (TREE_PURPOSE (link) == context)
4994	  {
4995	    base = RTL_EXPR_RTL (TREE_VALUE (link));
4996	    break;
4997	  }
4998    }
4999
5000  if (base == 0)
5001    abort ();
5002
5003  /* Use the same offset, relative to the appropriate static chain or
5004     argument pointer.  */
5005  return plus_constant (base, displacement);
5006}
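
/* For example, if VAR lives at (plus (reg vfp) (const_int 8)) in the
   containing function, where vfp is virtual_stack_vars_rtx, the
   address decodes to basereg == virtual_stack_vars_rtx with
   displacement 8; BASE becomes that function's entry in
   context_display, and the result is BASE plus 8.  */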
5007
5008/* Return the address of the trampoline for entering nested fn FUNCTION.
5009   If necessary, allocate a trampoline (in the stack frame)
5010   and emit rtl to initialize its contents (at entry to this function).  */
5011
5012rtx
5013trampoline_address (function)
5014     tree function;
5015{
5016  tree link;
5017  tree rtlexp;
5018  rtx tramp;
5019  struct function *fp;
5020  tree fn_context;
5021
5022  /* Find an existing trampoline and return it.  */
5023  for (link = trampoline_list; link; link = TREE_CHAIN (link))
5024    if (TREE_PURPOSE (link) == function)
5025      return
5026	round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5027
5028  for (fp = outer_function_chain; fp; fp = fp->next)
5029    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5030      if (TREE_PURPOSE (link) == function)
5031	{
5032	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5033				    function);
5034	  return round_trampoline_addr (tramp);
5035	}
5036
5037  /* None exists; we must make one.  */
5038
5039  /* Find the `struct function' for the function containing FUNCTION.  */
5040  fp = 0;
5041  fn_context = decl_function_context (function);
5042  if (fn_context != current_function_decl
5043      && fn_context != inline_function_decl)
5044    for (fp = outer_function_chain; fp; fp = fp->next)
5045      if (fp->decl == fn_context)
5046	break;
5047
5048  /* Allocate run-time space for this trampoline
5049     (usually in the defining function's stack frame).  */
5050#ifdef ALLOCATE_TRAMPOLINE
5051  tramp = ALLOCATE_TRAMPOLINE (fp);
5052#else
5053  /* If rounding is needed, allocate extra space
5054     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
5055#ifdef TRAMPOLINE_ALIGNMENT
5056#define TRAMPOLINE_REAL_SIZE \
5057  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5058#else
5059#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5060#endif
5061  if (fp != 0)
5062    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5063  else
5064    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5065#endif
5066
5067  /* Record the trampoline for reuse and note it for later initialization
5068     by expand_function_end.  */
5069  if (fp != 0)
5070    {
5071      push_obstacks (fp->function_maybepermanent_obstack,
5072		     fp->function_maybepermanent_obstack);
5073      rtlexp = make_node (RTL_EXPR);
5074      RTL_EXPR_RTL (rtlexp) = tramp;
5075      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5076      pop_obstacks ();
5077    }
5078  else
5079    {
5080      /* Make the RTL_EXPR node temporary, not momentary, so that the
5081	 trampoline_list doesn't become garbage.  */
5082      int momentary = suspend_momentary ();
5083      rtlexp = make_node (RTL_EXPR);
5084      resume_momentary (momentary);
5085
5086      RTL_EXPR_RTL (rtlexp) = tramp;
5087      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5088    }
5089
5090  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5091  return round_trampoline_addr (tramp);
5092}
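
/* For example (hypothetical source):

	void outer () { void inner () { ... } void (*p) () = inner; }

   P cannot be a bare SYMBOL_REF for INNER, because INNER also needs
   OUTER's static chain; instead P is given the address of a trampoline
   in OUTER's frame that loads the static chain and jumps to INNER.  */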
5093
5094/* Given a trampoline address,
5095   round it to multiple of TRAMPOLINE_ALIGNMENT.  */
5096
5097static rtx
5098round_trampoline_addr (tramp)
5099     rtx tramp;
5100{
5101#ifdef TRAMPOLINE_ALIGNMENT
5102  /* Round address up to desired boundary.  */
5103  rtx temp = gen_reg_rtx (Pmode);
5104  temp = expand_binop (Pmode, add_optab, tramp,
5105		       GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5106		       temp, 0, OPTAB_LIB_WIDEN);
5107  tramp = expand_binop (Pmode, and_optab, temp,
5108			GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5109			temp, 0, OPTAB_LIB_WIDEN);
5110#endif
5111  return tramp;
5112}
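
/* For example, with TRAMPOLINE_ALIGNMENT == 32 (bits), the insns
   emitted above compute (tramp + 3) & -4, rounding the address up to
   the next multiple of 4 bytes.  */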
5113
5114/* The functions identify_blocks and reorder_blocks provide a way to
5115   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5116   duplicate portions of the RTL code.  Call identify_blocks before
5117   changing the RTL, and call reorder_blocks after.  */
5118
5119/* Put all this function's BLOCK nodes, including those that are chained
5120   onto the first block, into a vector, and return it.
5121   Also store in each NOTE for the beginning or end of a block
5122   the index of that block in the vector.
5123   The arguments are BLOCK, the chain of top-level blocks of the function,
5124   and INSNS, the insn chain of the function.  */
5125
5126tree *
5127identify_blocks (block, insns)
5128     tree block;
5129     rtx insns;
5130{
5131  int n_blocks;
5132  tree *block_vector;
5133  int *block_stack;
5134  int depth = 0;
5135  int next_block_number = 1;
5136  int current_block_number = 1;
5137  rtx insn;
5138
5139  if (block == 0)
5140    return 0;
5141
5142  n_blocks = all_blocks (block, 0);
5143  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5144  block_stack = (int *) alloca (n_blocks * sizeof (int));
5145
5146  all_blocks (block, block_vector);
5147
5148  for (insn = insns; insn; insn = NEXT_INSN (insn))
5149    if (GET_CODE (insn) == NOTE)
5150      {
5151	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5152	  {
5153	    block_stack[depth++] = current_block_number;
5154	    current_block_number = next_block_number;
5155	    NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5156	  }
5157	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5158	  {
5159	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
5160	    current_block_number = block_stack[--depth];
5161	  }
5162      }
5163
5164  if (n_blocks != next_block_number)
5165    abort ();
5166
5167  return block_vector;
5168}
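
/* For example, if the insns contain the block notes

	BEG BEG END BEG END END

   the notes are numbered 1 2 2 3 3 1 respectively, and block_vector
   must hold 4 blocks; the outermost block sits at index 0 and has no
   notes of its own.  */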
5169
5170/* Given BLOCK_VECTOR which was returned by identify_blocks,
5171   and a revised instruction chain, rebuild the tree structure
5172   of BLOCK nodes to correspond to the new order of RTL.
5173   The new block tree is inserted below BLOCK.
5174   Returns the current top-level block.  */
5175
5176tree
5177reorder_blocks (block_vector, block, insns)
5178     tree *block_vector;
5179     tree block;
5180     rtx insns;
5181{
5182  tree current_block = block;
5183  rtx insn;
5184
5185  if (block_vector == 0)
5186    return block;
5187
5188  /* Prune the old trees away, so that they don't get in the way.  */
5189  BLOCK_SUBBLOCKS (current_block) = 0;
5190  BLOCK_CHAIN (current_block) = 0;
5191
5192  for (insn = insns; insn; insn = NEXT_INSN (insn))
5193    if (GET_CODE (insn) == NOTE)
5194      {
5195	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5196	  {
5197	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5198	    /* If we have seen this block before, copy it.  */
5199	    if (TREE_ASM_WRITTEN (block))
5200	      block = copy_node (block);
5201	    BLOCK_SUBBLOCKS (block) = 0;
5202	    TREE_ASM_WRITTEN (block) = 1;
5203	    BLOCK_SUPERCONTEXT (block) = current_block;
5204	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5205	    BLOCK_SUBBLOCKS (current_block) = block;
5206	    current_block = block;
5207	    NOTE_SOURCE_FILE (insn) = 0;
5208	  }
5209	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5210	  {
5211	    BLOCK_SUBBLOCKS (current_block)
5212	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5213	    current_block = BLOCK_SUPERCONTEXT (current_block);
5214	    NOTE_SOURCE_FILE (insn) = 0;
5215	  }
5216      }
5217
5218  BLOCK_SUBBLOCKS (current_block)
5219    = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5220  return current_block;
5221}
5222
5223/* Reverse the order of elements in the chain T of blocks,
5224   and return the new head of the chain (old last element).  */
5225
5226static tree
5227blocks_nreverse (t)
5228     tree t;
5229{
5230  register tree prev = 0, decl, next;
5231  for (decl = t; decl; decl = next)
5232    {
5233      next = BLOCK_CHAIN (decl);
5234      BLOCK_CHAIN (decl) = prev;
5235      prev = decl;
5236    }
5237  return prev;
5238}
5239
5240/* Count the subblocks of the list starting with BLOCK, and list them
5241   all into the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
5242   blocks.  */
5243
5244static int
5245all_blocks (block, vector)
5246     tree block;
5247     tree *vector;
5248{
5249  int n_blocks = 0;
5250
5251  while (block)
5252    {
5253      TREE_ASM_WRITTEN (block) = 0;
5254
5255      /* Record this block.  */
5256      if (vector)
5257	vector[n_blocks] = block;
5258
5259      ++n_blocks;
5260
5261      /* Record the subblocks, and their subblocks...  */
5262      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5263			      vector ? vector + n_blocks : 0);
5264      block = BLOCK_CHAIN (block);
5265    }
5266
5267  return n_blocks;
5268}
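
/* For example, a top-level block with two subblocks, the first of
   which has one subblock of its own, gives n_blocks == 4; the vector
   lists the blocks in depth-first order: the top-level block, the
   first subblock, its child, then the second subblock.  */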
5269
5270/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5271   and initialize static variables for generating RTL for the statements
5272   of the function.  */
5273
5274void
5275init_function_start (subr, filename, line)
5276     tree subr;
5277     char *filename;
5278     int line;
5279{
5280  init_stmt_for_function ();
5281
5282  cse_not_expected = ! optimize;
5283
5284  /* Caller save not needed yet.  */
5285  caller_save_needed = 0;
5286
5287  /* No stack slots have been made yet.  */
5288  stack_slot_list = 0;
5289
5290  /* There is no stack slot for handling nonlocal gotos.  */
5291  nonlocal_goto_handler_slot = 0;
5292  nonlocal_goto_stack_level = 0;
5293
5294  /* No labels have been declared for nonlocal use.  */
5295  nonlocal_labels = 0;
5296
5297  /* No function calls so far in this function.  */
5298  function_call_count = 0;
5299
5300  /* No parm regs have been allocated.
5301     (This is important for output_inline_function.)  */
5302  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5303
5304  /* Initialize the RTL mechanism.  */
5305  init_emit ();
5306
5307  /* Initialize the queue of pending postincrement and postdecrements,
5308     and some other info in expr.c.  */
5309  init_expr ();
5310
5311  /* We haven't done register allocation yet.  */
5312  reg_renumber = 0;
5313
5314  init_const_rtx_hash_table ();
5315
5316  current_function_name = (*decl_printable_name) (subr, 2);
5317
5318  /* Nonzero if this is a nested function that uses a static chain.  */
5319
5320  current_function_needs_context
5321    = (decl_function_context (current_function_decl) != 0
5322       && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5323
5324  /* Set if a call to setjmp is seen.  */
5325  current_function_calls_setjmp = 0;
5326
5327  /* Set if a call to longjmp is seen.  */
5328  current_function_calls_longjmp = 0;
5329
5330  current_function_calls_alloca = 0;
5331  current_function_has_nonlocal_label = 0;
5332  current_function_has_nonlocal_goto = 0;
5333  current_function_contains_functions = 0;
5334  current_function_is_thunk = 0;
5335
5336  current_function_returns_pcc_struct = 0;
5337  current_function_returns_struct = 0;
5338  current_function_epilogue_delay_list = 0;
5339  current_function_uses_const_pool = 0;
5340  current_function_uses_pic_offset_table = 0;
5341  current_function_cannot_inline = 0;
5342
5343  /* We have not yet needed to make a label to jump to for tail-recursion.  */
5344  tail_recursion_label = 0;
5345
5346  /* We haven't had a need to make a save area for ap yet.  */
5347
5348  arg_pointer_save_area = 0;
5349
5350  /* No stack slots allocated yet.  */
5351  frame_offset = 0;
5352
5353  /* No SAVE_EXPRs in this function yet.  */
5354  save_expr_regs = 0;
5355
5356  /* No RTL_EXPRs in this function yet.  */
5357  rtl_expr_chain = 0;
5358
5359  /* Set up to allocate temporaries.  */
5360  init_temp_slots ();
5361
5362  /* Within function body, compute a type's size as soon as it is laid out.  */
5363  immediate_size_expand++;
5364
5365  /* We haven't made any trampolines for this function yet.  */
5366  trampoline_list = 0;
5367
5368  init_pending_stack_adjust ();
5369  inhibit_defer_pop = 0;
5370
5371  current_function_outgoing_args_size = 0;
5372
5373  /* Prevent ever trying to delete the first instruction of a function.
5374     Also tell final how to output a linenum before the function prologue.
5375     Note linenums could be missing, e.g. when compiling a Java .class file. */
5376  if (line > 0)
5377    emit_line_note (filename, line);
5378
5379  /* Make sure first insn is a note even if we don't want linenums.
5380     This makes sure the first insn will never be deleted.
5381     Also, final expects a note to appear there.  */
5382  emit_note (NULL_PTR, NOTE_INSN_DELETED);
5383
5384  /* Set flags used by final.c.  */
5385  if (aggregate_value_p (DECL_RESULT (subr)))
5386    {
5387#ifdef PCC_STATIC_STRUCT_RETURN
5388      current_function_returns_pcc_struct = 1;
5389#endif
5390      current_function_returns_struct = 1;
5391    }
5392
5393  /* Warn if this function's return value is an aggregate type,
5394     regardless of which calling convention we are using for it.  */
5395  if (warn_aggregate_return
5396      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5397    warning ("function returns an aggregate");
5398
5399  current_function_returns_pointer
5400    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5401
5402  /* Indicate that we need to distinguish between the return value of the
5403     present function and the return value of a function being called.  */
5404  rtx_equal_function_value_matters = 1;
5405
5406  /* Indicate that we have not instantiated virtual registers yet.  */
5407  virtuals_instantiated = 0;
5408
5409  /* Indicate we have no need of a frame pointer yet.  */
5410  frame_pointer_needed = 0;
5411
5412  /* By default assume not varargs or stdarg.  */
5413  current_function_varargs = 0;
5414  current_function_stdarg = 0;
5415}
5416
5417/* Indicate that the current function uses extra args not mentioned
5418   in any fashion in its explicit argument list.  */
5419
5420void
5421mark_varargs ()
5422{
5423  current_function_varargs = 1;
5424}
5425
5426/* Expand a call to __main at the beginning of a possible main function.  */
5427
5428#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5429#undef HAS_INIT_SECTION
5430#define HAS_INIT_SECTION
5431#endif
5432
5433#ifndef GEN_CALL__MAIN
5434#define GEN_CALL__MAIN \
5435  do {									\
5436    emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,	\
5437		       VOIDmode, 0);					\
5438  } while (0)
5439#endif
5440
5441void
5442expand_main_function ()
5443{
5444#if defined(INVOKE__main) || !defined (HAS_INIT_SECTION)
5445  GEN_CALL__MAIN;
5446#endif /* not HAS_INIT_SECTION */
5447}
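
/* As an illustration: on a target that defines no init section, the
   RTL for main therefore begins with the library call emitted by
   GEN_CALL__MAIN above, and __main performs one-time startup work
   such as running global constructors before main's body.  */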
5448
5449extern struct obstack permanent_obstack;
5450
5451/* Start the RTL for a new function, and set variables used for
5452   emitting RTL.
5453   SUBR is the FUNCTION_DECL node.
5454   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5455   the function's parameters, which must be run at any return statement.  */
5456
5457void
5458expand_function_start (subr, parms_have_cleanups)
5459     tree subr;
5460     int parms_have_cleanups;
5461{
5462  register int i;
5463  tree tem;
5464  rtx last_ptr = NULL_RTX;
5465
5466  /* Make sure volatile mem refs aren't considered
5467     valid operands of arithmetic insns.  */
5468  init_recog_no_volatile ();
5469
5470  /* If function gets a static chain arg, store it in the stack frame.
5471     Do this first, so it gets the first stack slot offset.  */
5472  if (current_function_needs_context)
5473    {
5474      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5475
5476      /* Delay copying the static chain if it is not in a register, to
5477	 avoid conflicts with regs used for parameters.  */
5478      if (! SMALL_REGISTER_CLASSES
5479	  || GET_CODE (static_chain_incoming_rtx) == REG)
5480        emit_move_insn (last_ptr, static_chain_incoming_rtx);
5481    }
5482
5483  /* If the parameters of this function need cleaning up, get a label
5484     for the beginning of the code which executes those cleanups.  This must
5485     be done before doing anything with return_label.  */
5486  if (parms_have_cleanups)
5487    cleanup_label = gen_label_rtx ();
5488  else
5489    cleanup_label = 0;
5490
5491  /* Make the label for return statements to jump to, if this machine
5492     does not have a one-instruction return and uses an epilogue,
5493     or if it returns a structure, or if it has parm cleanups.  */
5494#ifdef HAVE_return
5495  if (cleanup_label == 0 && HAVE_return
5496      && ! current_function_returns_pcc_struct
5497      && ! (current_function_returns_struct && ! optimize))
5498    return_label = 0;
5499  else
5500    return_label = gen_label_rtx ();
5501#else
5502  return_label = gen_label_rtx ();
5503#endif
5504
5505  /* Initialize rtx used to return the value.  */
5506  /* Do this before assign_parms so that we copy the struct value address
5507     before any library calls that assign parms might generate.  */
5508
5509  /* Decide whether to return the value in memory or in a register.  */
5510  if (aggregate_value_p (DECL_RESULT (subr)))
5511    {
5512      /* Returning something that won't go in a register.  */
5513      register rtx value_address = 0;
5514
5515#ifdef PCC_STATIC_STRUCT_RETURN
5516      if (current_function_returns_pcc_struct)
5517	{
5518	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5519	  value_address = assemble_static_space (size);
5520	}
5521      else
5522#endif
5523	{
5524	  /* Expect to be passed the address of a place to store the value.
5525	     If it is passed as an argument, assign_parms will take care of
5526	     it.  */
5527	  if (struct_value_incoming_rtx)
5528	    {
5529	      value_address = gen_reg_rtx (Pmode);
5530	      emit_move_insn (value_address, struct_value_incoming_rtx);
5531	    }
5532	}
5533      if (value_address)
5534	{
5535	  DECL_RTL (DECL_RESULT (subr))
5536	    = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5537	  MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5538	    = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5539	}
5540    }
5541  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5542    /* If return mode is void, this decl rtl should not be used.  */
5543    DECL_RTL (DECL_RESULT (subr)) = 0;
5544  else if (parms_have_cleanups)
5545    {
5546      /* If the function will end with cleanup code for parms,
5547	 compute the return value into a pseudo reg,
5548	 which we will copy into the true return register
5549	 after the cleanups are done.  */
5550
5551      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5552
5553#ifdef PROMOTE_FUNCTION_RETURN
5554      tree type = TREE_TYPE (DECL_RESULT (subr));
5555      int unsignedp = TREE_UNSIGNED (type);
5556
5557      mode = promote_mode (type, mode, &unsignedp, 1);
5558#endif
5559
5560      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5561    }
5562  else
5563    /* Scalar, returned in a register.  */
5564    {
5565#ifdef FUNCTION_OUTGOING_VALUE
5566      DECL_RTL (DECL_RESULT (subr))
5567	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5568#else
5569      DECL_RTL (DECL_RESULT (subr))
5570	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5571#endif
5572
5573      /* Mark this reg as the function's return value.  */
5574      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5575	{
5576	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5577	  /* Needed because we may need to move this to memory
5578	     in case it's a named return value whose address is taken.  */
5579	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
5580	}
5581    }
5582
5583  /* Initialize rtx for parameters and local variables.
5584     In some cases this requires emitting insns.  */
5585
5586  assign_parms (subr, 0);
5587
5588  /* Copy the static chain now if it wasn't a register.  The delay is to
5589     avoid conflicts with the parameter passing registers.  */
5590
5591  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5592      if (GET_CODE (static_chain_incoming_rtx) != REG)
5593        emit_move_insn (last_ptr, static_chain_incoming_rtx);
5594
5595  /* The following was moved from init_function_start.
5596     The move is supposed to make sdb output more accurate.  */
5597  /* Indicate the beginning of the function body,
5598     as opposed to parm setup.  */
5599  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5600
5601  /* If doing stupid allocation, mark parms as born here.  */
5602
5603  if (GET_CODE (get_last_insn ()) != NOTE)
5604    emit_note (NULL_PTR, NOTE_INSN_DELETED);
5605  parm_birth_insn = get_last_insn ();
5606
5607  if (obey_regdecls)
5608    {
5609      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5610	use_variable (regno_reg_rtx[i]);
5611
5612      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5613	use_variable (current_function_internal_arg_pointer);
5614    }
5615
5616  context_display = 0;
5617  if (current_function_needs_context)
5618    {
5619      /* Fetch static chain values for containing functions.  */
5620      tem = decl_function_context (current_function_decl);
5621      /* If not doing stupid register allocation, copy the static chain
5622	 pointer into a pseudo.  If we have small register classes, copy
5623	 the value from memory if static_chain_incoming_rtx is a REG.  If
5624	 we do stupid register allocation, we use the stack address
5625	 generated above.  */
5626      if (tem && ! obey_regdecls)
5627	{
5628	  /* If the static chain originally came in a register, put it back
5629	     there, then move it out in the next insn.  The reason for
5630	     this peculiar code is to satisfy function integration.  */
5631	  if (SMALL_REGISTER_CLASSES
5632	      && GET_CODE (static_chain_incoming_rtx) == REG)
5633	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
5634	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
5635	}
5636
5637      while (tem)
5638	{
5639	  tree rtlexp = make_node (RTL_EXPR);
5640
5641	  RTL_EXPR_RTL (rtlexp) = last_ptr;
5642	  context_display = tree_cons (tem, rtlexp, context_display);
5643	  tem = decl_function_context (tem);
5644	  if (tem == 0)
5645	    break;
5646	  /* Chain through stack frames, assuming pointer to next lexical frame
5647	     is found at the place we always store it.  */
5648#ifdef FRAME_GROWS_DOWNWARD
5649	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5650#endif
5651	  last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5652					       memory_address (Pmode, last_ptr)));
5653
5654	  /* If we are not optimizing, ensure that we know that this
5655	     piece of context is live over the entire function.  */
5656	  if (! optimize)
5657	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5658						save_expr_regs);
5659	}
5660    }
5661
5662  /* After the display initializations is where the tail-recursion label
5663     should go, if we end up needing one.  Ensure we have a NOTE here,
5664     since some things (like trampolines) get placed before this.  */
5665  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5666
5667  /* Evaluate now the sizes of any types declared among the arguments.  */
5668  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5669    {
5670      expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5671		   EXPAND_MEMORY_USE_BAD);
5672      /* Flush the queue in case this parameter declaration has
5673	 side-effects.  */
5674      emit_queue ();
5675    }
5676
5677  /* Make sure there is a line number after the function entry setup code.  */
5678  force_next_line_note ();
5679}
5680
5681/* Generate RTL for the end of the current function.
5682   FILENAME and LINE are the current position in the source file.
5683
5684   It is up to language-specific callers to do cleanups for parameters--
5685   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */
5686
5687void
5688expand_function_end (filename, line, end_bindings)
5689     char *filename;
5690     int line;
5691     int end_bindings;
5692{
5693  register int i;
5694  tree link;
5695
5696#ifdef TRAMPOLINE_TEMPLATE
5697  static rtx initial_trampoline;
5698#endif
5699
5700#ifdef NON_SAVING_SETJMP
5701  /* Don't put any variables in registers if we call setjmp
5702     on a machine that fails to restore the registers.  */
5703  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5704    {
5705      if (DECL_INITIAL (current_function_decl) != error_mark_node)
5706	setjmp_protect (DECL_INITIAL (current_function_decl));
5707
5708      setjmp_protect_args ();
5709    }
5710#endif
5711
5712  /* Save the argument pointer if a save area was made for it.  */
5713  if (arg_pointer_save_area)
5714    {
5715      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5716      emit_insn_before (x, tail_recursion_reentry);
5717    }
5718
5719  /* Initialize any trampolines required by this function.  */
5720  for (link = trampoline_list; link; link = TREE_CHAIN (link))
5721    {
5722      tree function = TREE_PURPOSE (link);
5723      rtx context = lookup_static_chain (function);
5724      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5725#ifdef TRAMPOLINE_TEMPLATE
5726      rtx blktramp;
5727#endif
5728      rtx seq;
5729
5730#ifdef TRAMPOLINE_TEMPLATE
5731      /* First make sure this compilation has a template for
5732	 initializing trampolines.  */
5733      if (initial_trampoline == 0)
5734	{
5735	  end_temporary_allocation ();
5736	  initial_trampoline
5737	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5738	  resume_temporary_allocation ();
5739	}
5740#endif
5741
5742      /* Generate insns to initialize the trampoline.  */
5743      start_sequence ();
5744      tramp = round_trampoline_addr (XEXP (tramp, 0));
5745#ifdef TRAMPOLINE_TEMPLATE
5746      blktramp = change_address (initial_trampoline, BLKmode, tramp);
5747      emit_block_move (blktramp, initial_trampoline,
5748		       GEN_INT (TRAMPOLINE_SIZE),
5749		       TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5750#endif
5751      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5752      seq = get_insns ();
5753      end_sequence ();
5754
5755      /* Put those insns at entry to the containing function (this one).  */
5756      emit_insns_before (seq, tail_recursion_reentry);
5757    }
5758
5759  /* If we are doing stack checking and this function makes calls,
5760     do a stack probe at the start of the function to ensure we have enough
5761     space for another stack frame.  */
5762  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5763    {
5764      rtx insn, seq;
5765
5766      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5767	if (GET_CODE (insn) == CALL_INSN)
5768	  {
5769	    start_sequence ();
5770	    probe_stack_range (STACK_CHECK_PROTECT,
5771			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5772	    seq = get_insns ();
5773	    end_sequence ();
5774	    emit_insns_before (seq, tail_recursion_reentry);
5775	    break;
5776	  }
5777    }
5778
5779  /* Warn about unused parms if extra warnings were specified.  */
5780  if (warn_unused && extra_warnings)
5781    {
5782      tree decl;
5783
5784      for (decl = DECL_ARGUMENTS (current_function_decl);
5785	   decl; decl = TREE_CHAIN (decl))
5786	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5787	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5788	  warning_with_decl (decl, "unused parameter `%s'");
5789    }
5790
5791  /* Delete handlers for nonlocal gotos if nothing uses them.  */
5792  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5793    delete_handlers ();
5794
5795  /* End any sequences that failed to be closed due to syntax errors.  */
5796  while (in_sequence_p ())
5797    end_sequence ();
5798
5799  /* Outside function body, can't compute type's actual size
5800     until next function's body starts.  */
5801  immediate_size_expand--;
5802
5803  /* If doing stupid register allocation,
5804     mark register parms as dying here.  */
5805
5806  if (obey_regdecls)
5807    {
5808      rtx tem;
5809      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5810	use_variable (regno_reg_rtx[i]);
5811
5812      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */
5813
5814      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5815	{
5816	  use_variable (XEXP (tem, 0));
5817	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
5818	}
5819
5820      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5821	use_variable (current_function_internal_arg_pointer);
5822    }
5823
5824  clear_pending_stack_adjust ();
5825  do_pending_stack_adjust ();
5826
5827  /* Mark the end of the function body.
5828     If control reaches this insn, the function can drop through
5829     without returning a value.  */
5830  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5831
5832  /* Must mark the last line number note in the function, so that the test
5833     coverage code can avoid counting the last line twice.  This just tells
5834     the code to ignore the immediately following line note, since there
5835     already exists a copy of this note somewhere above.  This line number
5836     note is still needed for debugging though, so we can't delete it.  */
5837  if (flag_test_coverage)
5838    emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5839
5840  /* Output a linenumber for the end of the function.
5841     SDB depends on this.  */
5842  emit_line_note_force (filename, line);
5843
5844  /* Output the label for the actual return from the function,
5845     if one is expected.  This happens either because a function epilogue
5846     is used instead of a return instruction, or because a return was done
5847     with a goto in order to run local cleanups, or because of pcc-style
5848     structure returning.  */
5849
5850  if (return_label)
5851    emit_label (return_label);
5852
5853  /* C++ uses this.  */
5854  if (end_bindings)
5855    expand_end_bindings (0, 0, 0);
5856
5857  /* Now handle any leftover exception regions that may have been
5858     created for the parameters.  */
5859  {
5860    rtx last = get_last_insn ();
5861    rtx label;
5862
5863    expand_leftover_cleanups ();
5864
5865    /* If the above emitted any code, make sure we jump around it.  */
5866    if (last != get_last_insn ())
5867      {
5868	label = gen_label_rtx ();
5869	last = emit_jump_insn_after (gen_jump (label), last);
5870	last = emit_barrier_after (last);
5871	emit_label (label);
5872      }
5873  }
5874
5875  /* If we had calls to alloca, and this machine needs
5876     an accurate stack pointer to exit the function,
5877     insert some code to save and restore the stack pointer.  */
5878#ifdef EXIT_IGNORE_STACK
5879  if (! EXIT_IGNORE_STACK)
5880#endif
5881    if (current_function_calls_alloca)
5882      {
5883	rtx tem = 0;
5884
5885	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5886	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5887      }
5888
5889  /* If scalar return value was computed in a pseudo-reg,
5890     copy that to the hard return register.  */
5891  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5892      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5893      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5894	  >= FIRST_PSEUDO_REGISTER))
5895    {
5896      rtx real_decl_result;
5897
5898#ifdef FUNCTION_OUTGOING_VALUE
5899      real_decl_result
5900	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5901				   current_function_decl);
5902#else
5903      real_decl_result
5904	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5905			  current_function_decl);
5906#endif
5907      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5908      /* If this is a BLKmode structure being returned in registers, then use
5909	 the mode computed in expand_return.  */
5910      if (GET_MODE (real_decl_result) == BLKmode)
5911	PUT_MODE (real_decl_result,
5912		  GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5913      emit_move_insn (real_decl_result,
5914		      DECL_RTL (DECL_RESULT (current_function_decl)));
5915      emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
5916
5917      /* The delay slot scheduler assumes that current_function_return_rtx
5918	 holds the hard register containing the return value, not a temporary
5919	 pseudo.  */
5920      current_function_return_rtx = real_decl_result;
5921    }
5922
5923  /* If returning a structure, arrange to return the address of the value
5924     in a place where debuggers expect to find it.
5925
5926     If returning a structure PCC style,
5927     the caller also depends on this value.
5928     And current_function_returns_pcc_struct is not necessarily set.  */
5929  if (current_function_returns_struct
5930      || current_function_returns_pcc_struct)
5931    {
5932      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5933      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5934#ifdef FUNCTION_OUTGOING_VALUE
5935      rtx outgoing
5936	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5937				   current_function_decl);
5938#else
5939      rtx outgoing
5940	= FUNCTION_VALUE (build_pointer_type (type),
5941			  current_function_decl);
5942#endif
5943
5944      /* Mark this as a function return value so integrate will delete the
5945	 assignment and USE below when inlining this function.  */
5946      REG_FUNCTION_VALUE_P (outgoing) = 1;
5947
5948      emit_move_insn (outgoing, value_address);
5949      use_variable (outgoing);
5950    }
5951
5952  /* Output a return insn if we are using one.
5953     Otherwise, let the rtl chain end here, to drop through
5954     into the epilogue.  */
5955
5956#ifdef HAVE_return
5957  if (HAVE_return)
5958    {
5959      emit_jump_insn (gen_return ());
5960      emit_barrier ();
5961    }
5962#endif
5963
5964  /* Fix up any gotos that jumped out to the outermost
5965     binding level of the function.
5966     Must follow emitting RETURN_LABEL.  */
5967
5968  /* If you have any cleanups to do at this point,
5969     and they need to create temporary variables,
5970     then you will lose.  */
5971  expand_fixups (get_insns ());
5972}
5973
5974/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
5975
5976static int *prologue;
5977static int *epilogue;
5978
5979/* Create an array that records the INSN_UIDs of INSNS (either a sequence
5980   or a single insn).  */
5981
5982#if defined (HAVE_prologue) || defined (HAVE_epilogue)
5983static int *
5984record_insns (insns)
5985     rtx insns;
5986{
5987  int *vec;
5988
5989  if (GET_CODE (insns) == SEQUENCE)
5990    {
5991      int len = XVECLEN (insns, 0);
5992      vec = (int *) oballoc ((len + 1) * sizeof (int));
5993      vec[len] = 0;
5994      while (--len >= 0)
5995	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5996    }
5997  else
5998    {
5999      vec = (int *) oballoc (2 * sizeof (int));
6000      vec[0] = INSN_UID (insns);
6001      vec[1] = 0;
6002    }
6003  return vec;
6004}
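
/* For example, for a SEQUENCE of three insns with UIDs 10, 11 and 12,
   the recorded vector is {10, 11, 12, 0}; the trailing zero marks the
   end.  */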
6005
6006/* Determine how many INSN_UIDs in VEC are part of INSN.  */
6007
6008static int
6009contains (insn, vec)
6010     rtx insn;
6011     int *vec;
6012{
6013  register int i, j;
6014
6015  if (GET_CODE (insn) == INSN
6016      && GET_CODE (PATTERN (insn)) == SEQUENCE)
6017    {
6018      int count = 0;
6019      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6020	for (j = 0; vec[j]; j++)
6021	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6022	    count++;
6023      return count;
6024    }
6025  else
6026    {
6027      for (j = 0; vec[j]; j++)
6028	if (INSN_UID (insn) == vec[j])
6029	  return 1;
6030    }
6031  return 0;
6032}
6033#endif /* HAVE_prologue || HAVE_epilogue */
6034
6035/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
6036   this into place with notes indicating where the prologue ends and where
6037   the epilogue begins.  Update the basic block information when possible.  */
6038
6039void
6040thread_prologue_and_epilogue_insns (f)
6041     rtx f;
6042{
6043#ifdef HAVE_prologue
6044  if (HAVE_prologue)
6045    {
6046      rtx head, seq;
6047
6048      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6049	 prologue insns and a NOTE_INSN_PROLOGUE_END.  */
6050      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6051      seq = gen_prologue ();
6052      head = emit_insn_after (seq, f);
6053
6054      /* Include the new prologue insns in the first block.  Ignore them
6055	 if they form a basic block unto themselves.  */
6056      if (basic_block_head && n_basic_blocks
6057	  && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6058	basic_block_head[0] = NEXT_INSN (f);
6059
6060      /* Retain a map of the prologue insns.  */
6061      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6062    }
6063  else
6064#endif
6065    prologue = 0;
6066
6067#ifdef HAVE_epilogue
6068  if (HAVE_epilogue)
6069    {
6070      rtx insn = get_last_insn ();
6071      rtx prev = prev_nonnote_insn (insn);
6072
6073      /* If we end with a BARRIER, we don't need an epilogue.  */
6074      if (! (prev && GET_CODE (prev) == BARRIER))
6075	{
6076	  rtx tail, seq, tem;
6077	  rtx first_use = 0;
6078	  rtx last_use = 0;
6079
6080	  /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6081	     epilogue insns, the USE insns at the end of a function,
6082	     the jump insn that returns, and then a BARRIER.  */
6083
6084	  /* Move the USE insns at the end of a function onto a list.  */
6085	  while (prev
6086		 && GET_CODE (prev) == INSN
6087		 && GET_CODE (PATTERN (prev)) == USE)
6088	    {
6089	      tem = prev;
6090	      prev = prev_nonnote_insn (prev);
6091
6092	      NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6093	      PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6094	      if (first_use)
6095		{
6096		  NEXT_INSN (tem) = first_use;
6097		  PREV_INSN (first_use) = tem;
6098		}
6099	      first_use = tem;
6100	      if (!last_use)
6101		last_use = tem;
6102	    }
6103
6104	  emit_barrier_after (insn);
6105
6106	  seq = gen_epilogue ();
6107	  tail = emit_jump_insn_after (seq, insn);
6108
6109	  /* Insert the USE insns immediately before the return insn, which
6110	     must be the first instruction before the final barrier.  */
6111	  if (first_use)
6112	    {
6113	      tem = prev_nonnote_insn (get_last_insn ());
6114	      NEXT_INSN (PREV_INSN (tem)) = first_use;
6115	      PREV_INSN (first_use) = PREV_INSN (tem);
6116	      PREV_INSN (tem) = last_use;
6117	      NEXT_INSN (last_use) = tem;
6118	    }
6119
6120	  emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6121
6122	  /* Include the new epilogue insns in the last block.  Ignore
6123	     them if they form a basic block unto themselves.  */
6124	  if (basic_block_end && n_basic_blocks
6125	      && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6126	    basic_block_end[n_basic_blocks - 1] = tail;
6127
6128	  /* Retain a map of the epilogue insns.  */
6129	  epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6130	  return;
6131	}
6132    }
6133#endif
6134  epilogue = 0;
6135}
6136
6137/* Reposition the prologue-end and epilogue-begin notes after instruction
6138   scheduling and delayed branch scheduling.  */
6139
6140void
6141reposition_prologue_and_epilogue_notes (f)
6142     rtx f;
6143{
6144#if defined (HAVE_prologue) || defined (HAVE_epilogue)
6145  /* Reposition the prologue and epilogue notes.  */
6146  if (n_basic_blocks)
6147    {
6148      rtx next, prev;
6149      int len;
6150
6151      if (prologue)
6152	{
6153	  register rtx insn, note = 0;
6154
6155	  /* Scan from the beginning until we reach the last prologue insn.
6156	     We apparently can't depend on basic_block_{head,end} after
6157	     reorg has run.  */
6158	  for (len = 0; prologue[len]; len++)
6159	    ;
6160	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
6161	    {
6162	      if (GET_CODE (insn) == NOTE)
6163		{
6164		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6165		    note = insn;
6166		}
6167	      else if ((len -= contains (insn, prologue)) == 0)
6168		{
6169		  /* Find the prologue-end note if we haven't already, and
6170		     move it to just after the last prologue insn.  */
6171		  if (note == 0)
6172		    {
6173		      for (note = insn; (note = NEXT_INSN (note));)
6174			if (GET_CODE (note) == NOTE
6175			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6176			  break;
6177		    }
6178		  next = NEXT_INSN (note);
6179		  prev = PREV_INSN (note);
6180		  if (prev)
6181		    NEXT_INSN (prev) = next;
6182		  if (next)
6183		    PREV_INSN (next) = prev;
6184		  add_insn_after (note, insn);
6185		}
6186	    }
6187	}
6188
6189      if (epilogue)
6190	{
6191	  register rtx insn, note = 0;
6192
6193	  /* Scan from the end until we reach the first epilogue insn.
6194	     We apparently can't depend on basic_block_{head,end} after
6195	     reorg has run.  */
6196	  for (len = 0; epilogue[len]; len++)
6197	    ;
6198	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6199	    {
6200	      if (GET_CODE (insn) == NOTE)
6201		{
6202		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6203		    note = insn;
6204		}
6205	      else if ((len -= contains (insn, epilogue)) == 0)
6206		{
6207		  /* Find the epilogue-begin note if we haven't already, and
6208		     move it to just before the first epilogue insn.  */
6209		  if (note == 0)
6210		    {
6211		      for (note = insn; (note = PREV_INSN (note));)
6212			if (GET_CODE (note) == NOTE
6213			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6214			  break;
6215		    }
6216		  next = NEXT_INSN (note);
6217		  prev = PREV_INSN (note);
6218		  if (prev)
6219		    NEXT_INSN (prev) = next;
6220		  if (next)
6221		    PREV_INSN (next) = prev;
6222		  add_insn_after (note, PREV_INSN (insn));
6223		}
6224	    }
6225	}
6226    }
6227#endif /* HAVE_prologue or HAVE_epilogue */
6228}
6229