expr.c revision 72562
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
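
/* A quick worked example (illustrative comment, not original to this
   file): CEIL rounds an integer division up, so
   CEIL (10, 4) == (10 + 4 - 1) / 4 == 13 / 4 == 3; that is, ten bytes
   occupy three four-byte words.  It is used below to compute word
   counts, e.g. the number of words needed to hold a multiword mode.  */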

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Under some ABIs, it is the caller's responsibility to pop arguments
   pushed for function calls.  A naive implementation would simply pop
   the arguments immediately after each call.  However, if several
   function calls are made in a row, it is typically cheaper to pop
   all the arguments after all of the calls are complete since a
   single pop instruction can be used.  Therefore, GCC attempts to
   defer popping the arguments until absolutely necessary.  (For
   example, at the end of a conditional, the arguments must be popped,
   since code outside the conditional won't know whether or not the
   arguments need to be popped.)

   When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
   attempt to defer pops.  Instead, the stack is popped immediately
   after each call.  Rather than setting this variable directly, use
   NO_DEFER_POP and OK_DEFER_POP.  */
int inhibit_defer_pop;
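
/* Illustrative note (not in the original source): in expr.h these
   macros simply adjust the counter, roughly

     #define NO_DEFER_POP (inhibit_defer_pop += 1)
     #define OK_DEFER_POP (inhibit_defer_pop -= 1)

   so a caller brackets a region where pending pops must not be
   deferred:

     NO_DEFER_POP;
     ... emit code that must see a fully-adjusted stack ...
     OK_DEFER_POP;

   Nested uses work because the variable is a counter, not a flag.  */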

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static void init_queue		PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					   tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_setjmp PROTO((tree, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns        \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif
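
/* Worked example (illustrative comment, not original to this file):
   with the default MOVE_RATIO of 15 and a target whose widest
   integer move is 4 bytes, MOVE_BY_PIECES_P (32, 4) asks
   move_by_pieces_ninsns how many scalar moves a 32-byte copy needs:
   32 / 4 = 8 insns, and 8 < 15, so the copy is expanded inline with
   scalar moves rather than through a movstr pattern or a memcpy
   library call.  */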

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}
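
/* Illustrative example (comment only, not original to this file):
   for a C expression like `a[i++]', expand_increment calls
   enqueue_insn with VAR = the rtx for `i' and BODY = an insn that
   adds 1 to it.  The QUEUED rtx returned stands for the
   pre-increment value of `i'; the add itself is emitted later by
   emit_queue, after the surrounding expression has consumed the old
   value.  */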

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
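
/* For instance (illustrative comment, not original to this file),
   queued_subexp_p returns 1 for an address such as
   (plus (queued ...) (const_int 4)), since the QUEUED operand will
   change when the pending increment is emitted, but returns 0 for
   (plus (reg ...) (const_int 4)).  */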

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
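
/* A small usage sketch (illustrative comment, not original to this
   file): to widen a 32-bit value FROM into a 64-bit register one
   would write, e.g.,

     rtx to = gen_reg_rtx (DImode);
     convert_move (to, from, 1);

   with UNSIGNEDP = 1 requesting zero-extension.  convert_move then
   picks a direct extend insn, a word-mode intermediate, or the
   multiword fill sequence above, whichever the target supports.  */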

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
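
  /* For example (illustrative comment, not original to this file):
     on a host with a 32-bit HOST_WIDE_INT, converting (const_int -1)
     to an unsigned 64-bit mode must yield the double constant with
     low word 0xffffffff and high word 0 (the value 4294967295),
     rather than the sign-extended all-ones pair; that is what the
     immed_double_const call below arranges.  */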

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}


/* MOVE_MAX_PIECES is the largest unit size that move_by_pieces can
   use: the number of bytes we can move efficiently at a time, as
   opposed to MOVE_MAX, which is the maximum number of bytes we can
   move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
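
/* Worked example (illustrative comment, not original to this file):
   on a target with MOVE_MAX = 4 and full alignment, moving L = 10
   bytes counts as 10 / 4 = 2 SImode moves (2 bytes left over), then
   2 / 2 = 1 HImode move, then nothing for QImode: 3 insns in all.
   With ALIGN = 1 on a strict-alignment target, the wider modes are
   skipped and the same 10 bytes cost 10 QImode moves instead.  */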
1570
1571/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1572   with move instructions for mode MODE.  GENFUN is the gen_... function
1573   to make a move insn for that mode.  DATA has all the other info.  */
1574
1575static void
1576move_by_pieces_1 (genfun, mode, data)
1577     rtx (*genfun) PROTO ((rtx, ...));
1578     enum machine_mode mode;
1579     struct move_by_pieces *data;
1580{
1581  register int size = GET_MODE_SIZE (mode);
1582  register rtx to1, from1;
1583
1584  while (data->len >= size)
1585    {
1586      if (data->reverse) data->offset -= size;
1587
1588      to1 = (data->autinc_to
1589	     ? gen_rtx_MEM (mode, data->to_addr)
1590	     : copy_rtx (change_address (data->to, mode,
1591					 plus_constant (data->to_addr,
1592							data->offset))));
1593      MEM_IN_STRUCT_P (to1) = data->to_struct;
1594
1595      from1
1596	= (data->autinc_from
1597	   ? gen_rtx_MEM (mode, data->from_addr)
1598	   : copy_rtx (change_address (data->from, mode,
1599				       plus_constant (data->from_addr,
1600						      data->offset))));
1601      MEM_IN_STRUCT_P (from1) = data->from_struct;
1602
1603      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1604	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1605      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1606	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1607
1608      emit_insn ((*genfun) (to1, from1));
1609      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1610	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1611      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1612	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1613
1614      if (! data->reverse) data->offset += size;
1615
1616      data->len -= size;
1617    }
1618}
1619
1620/* Emit code to move a block Y to a block X.
1621   This may be done with string-move instructions,
1622   with multiple scalar move instructions, or with a library call.
1623
1624   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1625   with mode BLKmode.
1626   SIZE is an rtx that says how long they are.
1627   ALIGN is the maximum alignment we can assume they have,
1628   measured in bytes.
1629
1630   Return the address of the new block, if memcpy is called and returns it,
1631   0 otherwise.  */
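
/* In outline: a constant SIZE accepted by MOVE_BY_PIECES_P goes through
   move_by_pieces; otherwise each movstrM pattern the target provides is
   tried in turn; failing those, the block is copied by a call to memcpy
   (or to bcopy when TARGET_MEM_FUNCTIONS is not defined).  */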
1632
1633rtx
1634emit_block_move (x, y, size, align)
1635     rtx x, y;
1636     rtx size;
1637     int align;
1638{
1639  rtx retval = 0;
1640#ifdef TARGET_MEM_FUNCTIONS
1641  static tree fn;
1642  tree call_expr, arg_list;
1643#endif
1644
1645  if (GET_MODE (x) != BLKmode)
1646    abort ();
1647
1648  if (GET_MODE (y) != BLKmode)
1649    abort ();
1650
1651  x = protect_from_queue (x, 1);
1652  y = protect_from_queue (y, 0);
1653  size = protect_from_queue (size, 0);
1654
1655  if (GET_CODE (x) != MEM)
1656    abort ();
1657  if (GET_CODE (y) != MEM)
1658    abort ();
1659  if (size == 0)
1660    abort ();
1661
1662  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1663    move_by_pieces (x, y, INTVAL (size), align);
1664  else
1665    {
1666      /* Try the most limited insn first, because there's no point
1667	 including more than one in the machine description unless
1668	 the more limited one has some advantage.  */
1669
1670      rtx opalign = GEN_INT (align);
1671      enum machine_mode mode;
1672
1673      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1674	   mode = GET_MODE_WIDER_MODE (mode))
1675	{
1676	  enum insn_code code = movstr_optab[(int) mode];
1677
1678	  if (code != CODE_FOR_nothing
1679	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1680		 here because if SIZE is less than the mode mask, as it is
1681		 returned by the macro, it will definitely be less than the
1682		 actual mode mask.  */
1683	      && ((GET_CODE (size) == CONST_INT
1684		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1685		       <= (GET_MODE_MASK (mode) >> 1)))
1686		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1687	      && (insn_operand_predicate[(int) code][0] == 0
1688		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1689	      && (insn_operand_predicate[(int) code][1] == 0
1690		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1691	      && (insn_operand_predicate[(int) code][3] == 0
1692		  || (*insn_operand_predicate[(int) code][3]) (opalign,
1693							       VOIDmode)))
1694	    {
1695	      rtx op2;
1696	      rtx last = get_last_insn ();
1697	      rtx pat;
1698
1699	      op2 = convert_to_mode (mode, size, 1);
1700	      if (insn_operand_predicate[(int) code][2] != 0
1701		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1702		op2 = copy_to_mode_reg (mode, op2);
1703
1704	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1705	      if (pat)
1706		{
1707		  emit_insn (pat);
1708		  return 0;
1709		}
1710	      else
1711		delete_insns_since (last);
1712	    }
1713	}
1714
1715      /* X, Y, or SIZE may have been passed through protect_from_queue.
1716
1717	 It is unsafe to save the value generated by protect_from_queue
1718	 and reuse it later.  Consider what happens if emit_queue is
1719	 called before the return value from protect_from_queue is used.
1720
1721	 Expansion of the CALL_EXPR below will call emit_queue before
1722	 we are finished emitting RTL for argument setup.  So if we are
1723	 not careful we could get the wrong value for an argument.
1724
1725	 To avoid this problem we go ahead and emit code to copy X, Y &
1726	 SIZE into new pseudos.  We can then place those new pseudos
1727	 into an RTL_EXPR and use them later, even after a call to
1728	 emit_queue.
1729
1730	 Note this is not strictly needed for library calls since they
1731	 do not call emit_queue before loading their arguments.  However,
1732	 we may need to have library calls call emit_queue in the future
1733	 since failing to do so could cause problems for targets which
1734	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1735      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1736      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1737
1738#ifdef TARGET_MEM_FUNCTIONS
1739      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1740#else
1741      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1742			      TREE_UNSIGNED (integer_type_node));
1743      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1744#endif
1745
1746#ifdef TARGET_MEM_FUNCTIONS
1747      /* It is incorrect to use the libcall calling conventions to call
1748	 memcpy in this context.
1749
1750	 This could be a user call to memcpy and the user may wish to
1751	 examine the return value from memcpy.
1752
1753	 For targets where libcalls and normal calls have different conventions
1754	 for returning pointers, we could end up generating incorrect code.
1755
1756	 So instead of using a libcall sequence we build up a suitable
1757	 CALL_EXPR and expand the call in the normal fashion.  */
1758      if (fn == NULL_TREE)
1759	{
1760	  tree fntype;
1761
1762	  /* This was copied from except.c; I don't know whether all of it
1763	     is necessary in this context.  */
1764	  fn = get_identifier ("memcpy");
1765	  push_obstacks_nochange ();
1766	  end_temporary_allocation ();
1767	  fntype = build_pointer_type (void_type_node);
1768	  fntype = build_function_type (fntype, NULL_TREE);
1769	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1770	  DECL_EXTERNAL (fn) = 1;
1771	  TREE_PUBLIC (fn) = 1;
1772	  DECL_ARTIFICIAL (fn) = 1;
1773	  make_decl_rtl (fn, NULL_PTR, 1);
1774	  assemble_external (fn);
1775	  pop_obstacks ();
1776	}
1777
1778      /* We need to make an argument list for the function call.
1779
1780	 memcpy has three arguments: the first two are void * addresses and
1781	 the last is a size_t byte count for the copy.  */
1782      arg_list
1783	= build_tree_list (NULL_TREE,
1784			   make_tree (build_pointer_type (void_type_node), x));
1785      TREE_CHAIN (arg_list)
1786	= build_tree_list (NULL_TREE,
1787			   make_tree (build_pointer_type (void_type_node), y));
1788      TREE_CHAIN (TREE_CHAIN (arg_list))
1789	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1790      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1791
1792      /* Now we have to build up the CALL_EXPR itself.  */
1793      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1794      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1795			 call_expr, arg_list, NULL_TREE);
1796      TREE_SIDE_EFFECTS (call_expr) = 1;
1797
1798      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1799#else
1800      emit_library_call (bcopy_libfunc, 0,
1801			 VOIDmode, 3, y, Pmode, x, Pmode,
1802			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1803					  TREE_UNSIGNED (integer_type_node)),
1804			 TYPE_MODE (integer_type_node));
1805#endif
1806    }
1807
1808  return retval;
1809}
1810
1811/* Copy all or part of a value X into registers starting at REGNO.
1812   The number of registers to be filled is NREGS.  */
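
/* For example, move_block_to_reg (3, x, 2, DImode) moves the two words
   of a DImode X into registers 3 and 4, using a load_multiple insn when
   the target provides one.  */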
1813
1814void
1815move_block_to_reg (regno, x, nregs, mode)
1816     int regno;
1817     rtx x;
1818     int nregs;
1819     enum machine_mode mode;
1820{
1821  int i;
1822#ifdef HAVE_load_multiple
1823  rtx pat;
1824  rtx last;
1825#endif
1826
1827  if (nregs == 0)
1828    return;
1829
1830  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1831    x = validize_mem (force_const_mem (mode, x));
1832
1833  /* See if the machine can do this with a load multiple insn.  */
1834#ifdef HAVE_load_multiple
1835  if (HAVE_load_multiple)
1836    {
1837      last = get_last_insn ();
1838      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1839			       GEN_INT (nregs));
1840      if (pat)
1841	{
1842	  emit_insn (pat);
1843	  return;
1844	}
1845      else
1846	delete_insns_since (last);
1847    }
1848#endif
1849
1850  for (i = 0; i < nregs; i++)
1851    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1852		    operand_subword_force (x, i, mode));
1853}
1854
1855/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1856   The number of registers to be filled is NREGS.  SIZE indicates the number
1857   of bytes in the object X.  */
1858
1860void
1861move_block_from_reg (regno, x, nregs, size)
1862     int regno;
1863     rtx x;
1864     int nregs;
1865     int size;
1866{
1867  int i;
1868#ifdef HAVE_store_multiple
1869  rtx pat;
1870  rtx last;
1871#endif
1872  enum machine_mode mode;
1873
1874  /* If SIZE is that of a mode no bigger than a word, just use that
1875     mode's store operation.  */
1876  if (size <= UNITS_PER_WORD
1877      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1878    {
1879      emit_move_insn (change_address (x, mode, NULL),
1880		      gen_rtx_REG (mode, regno));
1881      return;
1882    }
1883
1884  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1885     to the left before storing to memory.  Note that the previous test
1886     doesn't handle all cases (e.g. SIZE == 3).  */
1887  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1888    {
1889      rtx tem = operand_subword (x, 0, 1, BLKmode);
1890      rtx shift;
1891
1892      if (tem == 0)
1893	abort ();
1894
1895      shift = expand_shift (LSHIFT_EXPR, word_mode,
1896			    gen_rtx_REG (word_mode, regno),
1897			    build_int_2 ((UNITS_PER_WORD - size)
1898					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1899      emit_move_insn (tem, shift);
1900      return;
1901    }
1902
1903  /* See if the machine can do this with a store multiple insn.  */
1904#ifdef HAVE_store_multiple
1905  if (HAVE_store_multiple)
1906    {
1907      last = get_last_insn ();
1908      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1909				GEN_INT (nregs));
1910      if (pat)
1911	{
1912	  emit_insn (pat);
1913	  return;
1914	}
1915      else
1916	delete_insns_since (last);
1917    }
1918#endif
1919
1920  for (i = 0; i < nregs; i++)
1921    {
1922      rtx tem = operand_subword (x, i, 1, BLKmode);
1923
1924      if (tem == 0)
1925	abort ();
1926
1927      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1928    }
1929}
1930
1931/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1932   registers represented by a PARALLEL.  SSIZE represents the total size of
1933   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
1934   SRC in bits.  */
1935/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1936   the balance will be in what would be the low-order memory addresses, i.e.
1937   left justified for big endian, right justified for little endian.  This
1938   happens to be true for the targets currently using this support.  If this
1939   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1940   would be needed.  */
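
/* By way of illustration, DST might look like

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   describing a 16-byte value whose first eight bytes are to land in
   register 4 and whose second eight bytes are to land in register 5.  */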
1941
1942void
1943emit_group_load (dst, orig_src, ssize, align)
1944     rtx dst, orig_src;
1945     int align, ssize;
1946{
1947  rtx *tmps, src;
1948  int start, i;
1949
1950  if (GET_CODE (dst) != PARALLEL)
1951    abort ();
1952
1953  /* Check for a NULL entry, used to indicate that the parameter goes
1954     both on the stack and in registers.  */
1955  if (XEXP (XVECEXP (dst, 0, 0), 0))
1956    start = 0;
1957  else
1958    start = 1;
1959
1960  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1961
1962  /* If we won't be loading directly from memory, protect the real source
1963     from strange tricks we might play.  */
1964  src = orig_src;
1965  if (GET_CODE (src) != MEM)
1966    {
1967      src = gen_reg_rtx (GET_MODE (orig_src));
1968      emit_move_insn (src, orig_src);
1969    }
1970
1971  /* Process the pieces.  */
1972  for (i = start; i < XVECLEN (dst, 0); i++)
1973    {
1974      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1975      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1976      int bytelen = GET_MODE_SIZE (mode);
1977      int shift = 0;
1978
1979      /* Handle trailing fragments that run over the size of the struct.  */
1980      if (ssize >= 0 && bytepos + bytelen > ssize)
1981	{
1982	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1983	  bytelen = ssize - bytepos;
1984	  if (bytelen <= 0)
1985	    abort ();
1986	}
1987
1988      /* Optimize the access just a bit.  */
1989      if (GET_CODE (src) == MEM
1990	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1991	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1992	  && bytelen == GET_MODE_SIZE (mode))
1993	{
1994	  tmps[i] = gen_reg_rtx (mode);
1995	  emit_move_insn (tmps[i],
1996			  change_address (src, mode,
1997					  plus_constant (XEXP (src, 0),
1998							 bytepos)));
1999	}
2000      else
2001	{
2002	  tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2003				       bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2004				       mode, mode, align, ssize);
2005	}
2006
2007      if (BYTES_BIG_ENDIAN && shift)
2008	{
2009	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2010			tmps[i], 0, OPTAB_WIDEN);
2011	}
2012    }
2013  emit_queue ();
2014
2015  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2016  for (i = start; i < XVECLEN (dst, 0); i++)
2017    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2018}
2019
2020/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2021   registers represented by a PARALLEL.  SSIZE represents the total size of
2022   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */
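
/* SRC takes the same PARALLEL form illustrated above for emit_group_load,
   with the roles of the registers and the block reversed.  */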
2023
2024void
2025emit_group_store (orig_dst, src, ssize, align)
2026     rtx orig_dst, src;
2027     int ssize, align;
2028{
2029  rtx *tmps, dst;
2030  int start, i;
2031
2032  if (GET_CODE (src) != PARALLEL)
2033    abort ();
2034
2035  /* Check for a NULL entry, used to indicate that the parameter goes
2036     both on the stack and in registers.  */
2037  if (XEXP (XVECEXP (src, 0, 0), 0))
2038    start = 0;
2039  else
2040    start = 1;
2041
2042  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2043
2044  /* Copy the (probable) hard regs into pseudos.  */
2045  for (i = start; i < XVECLEN (src, 0); i++)
2046    {
2047      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2048      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2049      emit_move_insn (tmps[i], reg);
2050    }
2051  emit_queue ();
2052
2053  /* If we won't be storing directly into memory, protect the real destination
2054     from strange tricks we might play.  */
2055  dst = orig_dst;
2056  if (GET_CODE (dst) == PARALLEL)
2057    {
2058      rtx temp;
2059
2060      /* We can get a PARALLEL dst if there is a conditional expression in
2061	 a return statement.  In that case, the dst and src are the same,
2062	 so no action is necessary.  */
2063      if (rtx_equal_p (dst, src))
2064	return;
2065
2066      /* It is unclear if we can ever reach here, but we may as well handle
2067	 it.  Allocate a temporary, and split this into a store/load to/from
2068	 the temporary.  */
2069
2070      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2071      emit_group_store (temp, src, ssize, align);
2072      emit_group_load (dst, temp, ssize, align);
2073      return;
2074    }
2075  else if (GET_CODE (dst) != MEM)
2076    {
2077      dst = gen_reg_rtx (GET_MODE (orig_dst));
2078      /* Make life a bit easier for combine.  */
2079      emit_move_insn (dst, const0_rtx);
2080    }
2081  else if (! MEM_IN_STRUCT_P (dst))
2082    {
2083      /* store_bit_field requires that memory operations have
2084	 mem_in_struct_p set; we might not.  */
2085
2086      dst = copy_rtx (orig_dst);
2087      MEM_SET_IN_STRUCT_P (dst, 1);
2088    }
2089
2090  /* Process the pieces.  */
2091  for (i = start; i < XVECLEN (src, 0); i++)
2092    {
2093      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2094      enum machine_mode mode = GET_MODE (tmps[i]);
2095      int bytelen = GET_MODE_SIZE (mode);
2096
2097      /* Handle trailing fragments that run over the size of the struct.  */
2098      if (ssize >= 0 && bytepos + bytelen > ssize)
2099	{
2100	  if (BYTES_BIG_ENDIAN)
2101	    {
2102	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2103	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2104			    tmps[i], 0, OPTAB_WIDEN);
2105	    }
2106	  bytelen = ssize - bytepos;
2107	}
2108
2109      /* Optimize the access just a bit.  */
2110      if (GET_CODE (dst) == MEM
2111	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2112	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2113	  && bytelen == GET_MODE_SIZE (mode))
2114	{
2115	  emit_move_insn (change_address (dst, mode,
2116					  plus_constant (XEXP (dst, 0),
2117							 bytepos)),
2118			  tmps[i]);
2119	}
2120      else
2121	{
2122	  store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2123			   mode, tmps[i], align, ssize);
2124	}
2125    }
2126  emit_queue ();
2127
2128  /* Copy from the pseudo into the (probable) hard reg.  */
2129  if (GET_CODE (dst) == REG)
2130    emit_move_insn (orig_dst, dst);
2131}
2132
2133/* Generate code to copy a BLKmode object of TYPE out of a
2134   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2135   is null, a stack temporary is created.  TGTBLK is returned.
2136
2137   The primary purpose of this routine is to handle functions
2138   that return BLKmode structures in registers.  Some machines
2139   (the PA for example) want to return all small structures
2140   in registers regardless of the structure's alignment.
2141  */
2142
2143rtx
2144copy_blkmode_from_reg (tgtblk, srcreg, type)
2145     rtx tgtblk;
2146     rtx srcreg;
2147     tree type;
2148{
2149      int bytes = int_size_in_bytes (type);
2150      rtx src = NULL, dst = NULL;
2151      int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2152      int bitpos, xbitpos, big_endian_correction = 0;
2153
2154      if (tgtblk == 0)
2155	{
2156	  tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2157	  MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2158	  preserve_temp_slots (tgtblk);
2159	}
2160
2161      /* This code assumes srcreg is at least a full word.  If it isn't,
2162	 copy it into a new pseudo which is a full word.  */
2163      if (GET_MODE (srcreg) != BLKmode
2164	  && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2165	srcreg = convert_to_mode (word_mode, srcreg,
2166				  TREE_UNSIGNED (type));
2167
2168      /* Structures whose size is not a multiple of a word are aligned
2169	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2170	 machine, this means we must skip the empty high order bytes when
2171	 calculating the bit offset.  */
2172      if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2173	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2174						  * BITS_PER_UNIT));
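
      /* Illustrative numbers: with BITS_PER_WORD == 32 and a 6-byte
	 structure, bytes % UNITS_PER_WORD == 2, so the correction above
	 is 32 - 2 * 8 == 16; the first extraction then starts 16 bits
	 into SRCREG's first word, skipping the unused high-order bits.  */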
2175
2176      /* Copy the structure BITSIZE bits at a time.
2177
2178	 We could probably emit more efficient code for machines
2179	 which do not use strict alignment, but it doesn't seem
2180	 worth the effort at the current time.  */
2181      for (bitpos = 0, xbitpos = big_endian_correction;
2182	   bitpos < bytes * BITS_PER_UNIT;
2183	   bitpos += bitsize, xbitpos += bitsize)
2184	{
2186	  /* We need a new source operand each time xbitpos is on a
2187	     word boundary and when xbitpos == big_endian_correction
2188	     (the first time through).  */
2189	  if (xbitpos % BITS_PER_WORD == 0
2190	      || xbitpos == big_endian_correction)
2191	    src = operand_subword_force (srcreg,
2192					 xbitpos / BITS_PER_WORD,
2193					 BLKmode);
2194
2195	  /* We need a new destination operand each time bitpos is on
2196	     a word boundary.  */
2197	  if (bitpos % BITS_PER_WORD == 0)
2198	    dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2199
2200	  /* Use xbitpos for the source extraction (right justified) and
2201	     bitpos for the destination store (left justified).  */
2202	  store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2203			   extract_bit_field (src, bitsize,
2204					      xbitpos % BITS_PER_WORD, 1,
2205					      NULL_RTX, word_mode,
2206					      word_mode,
2207					      bitsize / BITS_PER_UNIT,
2208					      BITS_PER_WORD),
2209			   bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2210	}
2211      return tgtblk;
2212}
2213
2215/* Add a USE expression for REG to the (possibly empty) list pointed
2216   to by CALL_FUSAGE.  REG must denote a hard register.  */
2217
2218void
2219use_reg (call_fusage, reg)
2220     rtx *call_fusage, reg;
2221{
2222  if (GET_CODE (reg) != REG
2223      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2224    abort ();
2225
2226  *call_fusage
2227    = gen_rtx_EXPR_LIST (VOIDmode,
2228			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2229}
2230
2231/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2232   starting at REGNO.  All of these registers must be hard registers.  */
2233
2234void
2235use_regs (call_fusage, regno, nregs)
2236     rtx *call_fusage;
2237     int regno;
2238     int nregs;
2239{
2240  int i;
2241
2242  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2243    abort ();
2244
2245  for (i = 0; i < nregs; i++)
2246    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2247}
2248
2249/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2250   PARALLEL REGS.  This is for calls that pass values in multiple
2251   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2252
2253void
2254use_group_regs (call_fusage, regs)
2255     rtx *call_fusage;
2256     rtx regs;
2257{
2258  int i;
2259
2260  for (i = 0; i < XVECLEN (regs, 0); i++)
2261    {
2262      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2263
2264      /* A NULL entry means the parameter goes both on the stack and in
2265	 registers.  This can also be a MEM for targets that pass values
2266	 partially on the stack and partially in registers.  */
2267      if (reg != 0 && GET_CODE (reg) == REG)
2268	use_reg (call_fusage, reg);
2269    }
2270}
2271
2272/* Generate several move instructions to clear LEN bytes of block TO.
2273   (A MEM rtx with BLKmode).   The caller must pass TO through
2274   protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2275   we can assume.  */
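
/* For example (again illustrative): on a 32-bit target with
   MOVE_MAX_PIECES == 4 and sufficient alignment, clearing LEN == 9 bytes
   emits two SImode stores of zero followed by a single QImode store.  */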
2276
2277static void
2278clear_by_pieces (to, len, align)
2279     rtx to;
2280     int len, align;
2281{
2282  struct clear_by_pieces data;
2283  rtx to_addr = XEXP (to, 0);
2284  int max_size = MOVE_MAX_PIECES + 1;
2285  enum machine_mode mode = VOIDmode, tmode;
2286  enum insn_code icode;
2287
2288  data.offset = 0;
2289  data.to_addr = to_addr;
2290  data.to = to;
2291  data.autinc_to
2292    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2293       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2294
2295  data.explicit_inc_to = 0;
2296  data.reverse
2297    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2298  if (data.reverse) data.offset = len;
2299  data.len = len;
2300
2301  data.to_struct = MEM_IN_STRUCT_P (to);
2302
2303  /* If copying requires more than two move insns,
2304     copy addresses to registers (to make displacements shorter)
2305     and use post-increment if available.  */
2306  if (!data.autinc_to
2307      && move_by_pieces_ninsns (len, align) > 2)
2308    {
2309      /* Determine the main mode we'll be using.  */
2310      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2311	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2312	if (GET_MODE_SIZE (tmode) < max_size)
2313	  mode = tmode;
2314
2315      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2316	{
2317	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2318	  data.autinc_to = 1;
2319	  data.explicit_inc_to = -1;
2320	}
2321      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2322	{
2323	  data.to_addr = copy_addr_to_reg (to_addr);
2324	  data.autinc_to = 1;
2325	  data.explicit_inc_to = 1;
2326	}
2327      if (!data.autinc_to && CONSTANT_P (to_addr))
2328	data.to_addr = copy_addr_to_reg (to_addr);
2329    }
2330
2331  if (! SLOW_UNALIGNED_ACCESS
2332      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2333    align = MOVE_MAX;
2334
2335  /* First move what we can in the largest integer mode, then go to
2336     successively smaller modes.  */
2337
2338  while (max_size > 1)
2339    {
2340      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2341	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2342	if (GET_MODE_SIZE (tmode) < max_size)
2343	  mode = tmode;
2344
2345      if (mode == VOIDmode)
2346	break;
2347
2348      icode = mov_optab->handlers[(int) mode].insn_code;
2349      if (icode != CODE_FOR_nothing
2350	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2351			   GET_MODE_SIZE (mode)))
2352	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2353
2354      max_size = GET_MODE_SIZE (mode);
2355    }
2356
2357  /* The code above should have handled everything.  */
2358  if (data.len != 0)
2359    abort ();
2360}
2361
2362/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
2363   with move instructions for mode MODE.  GENFUN is the gen_... function
2364   to make a move insn for that mode.  DATA has all the other info.  */
2365
2366static void
2367clear_by_pieces_1 (genfun, mode, data)
2368     rtx (*genfun) PROTO ((rtx, ...));
2369     enum machine_mode mode;
2370     struct clear_by_pieces *data;
2371{
2372  register int size = GET_MODE_SIZE (mode);
2373  register rtx to1;
2374
2375  while (data->len >= size)
2376    {
2377      if (data->reverse) data->offset -= size;
2378
2379      to1 = (data->autinc_to
2380	     ? gen_rtx_MEM (mode, data->to_addr)
2381	     : copy_rtx (change_address (data->to, mode,
2382					 plus_constant (data->to_addr,
2383							data->offset))));
2384      MEM_IN_STRUCT_P (to1) = data->to_struct;
2385
2386      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2387	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2388
2389      emit_insn ((*genfun) (to1, const0_rtx));
2390      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2391	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2392
2393      if (! data->reverse) data->offset += size;
2394
2395      data->len -= size;
2396    }
2397}
2398
2399/* Write zeros through the storage of OBJECT.
2400   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2401   the maximum alignment we can assume it has, measured in bytes.
2402
2403   If we call a function that returns the length of the block, return it.  */
2404
2405rtx
2406clear_storage (object, size, align)
2407     rtx object;
2408     rtx size;
2409     int align;
2410{
2411#ifdef TARGET_MEM_FUNCTIONS
2412  static tree fn;
2413  tree call_expr, arg_list;
2414#endif
2415  rtx retval = 0;
2416
2417  if (GET_MODE (object) == BLKmode)
2418    {
2419      object = protect_from_queue (object, 1);
2420      size = protect_from_queue (size, 0);
2421
2422      if (GET_CODE (size) == CONST_INT
2423	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2424	clear_by_pieces (object, INTVAL (size), align);
2425
2426      else
2427	{
2428	  /* Try the most limited insn first, because there's no point
2429	     including more than one in the machine description unless
2430	     the more limited one has some advantage.  */
2431
2432	  rtx opalign = GEN_INT (align);
2433	  enum machine_mode mode;
2434
2435	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2436	       mode = GET_MODE_WIDER_MODE (mode))
2437	    {
2438	      enum insn_code code = clrstr_optab[(int) mode];
2439
2440	      if (code != CODE_FOR_nothing
2441		  /* We don't need MODE to be narrower than
2442		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2443		     the mode mask, as it is returned by the macro, it will
2444		     definitely be less than the actual mode mask.  */
2445		  && ((GET_CODE (size) == CONST_INT
2446		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2447			   <= (GET_MODE_MASK (mode) >> 1)))
2448		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2449		  && (insn_operand_predicate[(int) code][0] == 0
2450		      || (*insn_operand_predicate[(int) code][0]) (object,
2451								   BLKmode))
2452		  && (insn_operand_predicate[(int) code][2] == 0
2453		      || (*insn_operand_predicate[(int) code][2]) (opalign,
2454								   VOIDmode)))
2455		{
2456		  rtx op1;
2457		  rtx last = get_last_insn ();
2458		  rtx pat;
2459
2460		  op1 = convert_to_mode (mode, size, 1);
2461		  if (insn_operand_predicate[(int) code][1] != 0
2462		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
2463								     mode))
2464		    op1 = copy_to_mode_reg (mode, op1);
2465
2466		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2467		  if (pat)
2468		    {
2469		      emit_insn (pat);
2470		      return 0;
2471		    }
2472		  else
2473		    delete_insns_since (last);
2474		}
2475	    }
2476
2477	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2478
2479	     It is unsafe to save the value generated by protect_from_queue
2480	     and reuse it later.  Consider what happens if emit_queue is
2481	     called before the return value from protect_from_queue is used.
2482
2483	     Expansion of the CALL_EXPR below will call emit_queue before
2484	     we are finished emitting RTL for argument setup.  So if we are
2485	     not careful we could get the wrong value for an argument.
2486
2487	     To avoid this problem we go ahead and emit code to copy OBJECT
2488	     and SIZE into new pseudos.  We can then place those new pseudos
2489	     into an RTL_EXPR and use them later, even after a call to
2490	     emit_queue.
2491
2492	     Note this is not strictly needed for library calls since they
2493	     do not call emit_queue before loading their arguments.  However,
2494	     we may need to have library calls call emit_queue in the future
2495	     since failing to do so could cause problems for targets which
2496	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2497	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2498
2499#ifdef TARGET_MEM_FUNCTIONS
2500	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2501#else
2502	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2503				  TREE_UNSIGNED (integer_type_node));
2504	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2505#endif
2506
2508#ifdef TARGET_MEM_FUNCTIONS
2509	  /* It is incorrect to use the libcall calling conventions to call
2510	     memset in this context.
2511
2512	     This could be a user call to memset and the user may wish to
2513	     examine the return value from memset.
2514
2515	     For targets where libcalls and normal calls have different
2516	     conventions for returning pointers, we could end up generating
2517	     incorrect code.
2518
2519	     So instead of using a libcall sequence we build up a suitable
2520	     CALL_EXPR and expand the call in the normal fashion.  */
2521	  if (fn == NULL_TREE)
2522	    {
2523	      tree fntype;
2524
2525	      /* This was copied from except.c; I don't know whether all of it
2526		 is necessary in this context.  */
2527	      fn = get_identifier ("memset");
2528	      push_obstacks_nochange ();
2529	      end_temporary_allocation ();
2530	      fntype = build_pointer_type (void_type_node);
2531	      fntype = build_function_type (fntype, NULL_TREE);
2532	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2533	      DECL_EXTERNAL (fn) = 1;
2534	      TREE_PUBLIC (fn) = 1;
2535	      DECL_ARTIFICIAL (fn) = 1;
2536	      make_decl_rtl (fn, NULL_PTR, 1);
2537	      assemble_external (fn);
2538	      pop_obstacks ();
2539	    }
2540
2541	  /* We need to make an argument list for the function call.
2542
2543	     memset has three arguments: the first is a void * address, the
2544	     second an integer with the initialization value, and the last is
2545	     a size_t byte count for the copy.  */
2546	  arg_list
2547	    = build_tree_list (NULL_TREE,
2548			       make_tree (build_pointer_type (void_type_node),
2549					  object));
2550	  TREE_CHAIN (arg_list)
2551	    = build_tree_list (NULL_TREE,
2552			        make_tree (integer_type_node, const0_rtx));
2553	  TREE_CHAIN (TREE_CHAIN (arg_list))
2554	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2555	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2556
2557	  /* Now we have to build up the CALL_EXPR itself.  */
2558	  call_expr = build1 (ADDR_EXPR,
2559			      build_pointer_type (TREE_TYPE (fn)), fn);
2560	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2561			     call_expr, arg_list, NULL_TREE);
2562	  TREE_SIDE_EFFECTS (call_expr) = 1;
2563
2564	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2565#else
2566	  emit_library_call (bzero_libfunc, 0,
2567			     VOIDmode, 2, object, Pmode, size,
2568			     TYPE_MODE (integer_type_node));
2569#endif
2570	}
2571    }
2572  else
2573    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2574
2575  return retval;
2576}
2577
2578/* Generate code to copy Y into X.
2579   Both Y and X must have the same mode, except that
2580   Y can be a constant with VOIDmode.
2581   This mode cannot be BLKmode; use emit_block_move for that.
2582
2583   Return the last instruction emitted.  */
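
/* A typical use is simply

     emit_move_insn (target, const0_rtx);

   where TARGET supplies the mode, the CONST_INT having VOIDmode.  */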
2584
2585rtx
2586emit_move_insn (x, y)
2587     rtx x, y;
2588{
2589  enum machine_mode mode = GET_MODE (x);
2590
2591  x = protect_from_queue (x, 1);
2592  y = protect_from_queue (y, 0);
2593
2594  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2595    abort ();
2596
2597  /* Never force constant_p_rtx to memory.  */
2598  if (GET_CODE (y) == CONSTANT_P_RTX)
2599    ;
2600  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2601    y = force_const_mem (mode, y);
2602
2603  /* If X or Y are memory references, verify that their addresses are valid
2604     for the machine.  */
2605  if (GET_CODE (x) == MEM
2606      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2607	   && ! push_operand (x, GET_MODE (x)))
2608	  || (flag_force_addr
2609	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2610    x = change_address (x, VOIDmode, XEXP (x, 0));
2611
2612  if (GET_CODE (y) == MEM
2613      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2614	  || (flag_force_addr
2615	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2616    y = change_address (y, VOIDmode, XEXP (y, 0));
2617
2618  if (mode == BLKmode)
2619    abort ();
2620
2621  return emit_move_insn_1 (x, y);
2622}
2623
2624/* Low level part of emit_move_insn.
2625   Called just like emit_move_insn, but assumes X and Y
2626   are basically valid.  */
2627
2628rtx
2629emit_move_insn_1 (x, y)
2630     rtx x, y;
2631{
2632  enum machine_mode mode = GET_MODE (x);
2633  enum machine_mode submode;
2634  enum mode_class class = GET_MODE_CLASS (mode);
2635  int i;
2636
2637  if (mode >= MAX_MACHINE_MODE)
2638      abort ();
2639
2640  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2641    return
2642      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2643
2644  /* Expand complex moves by moving real part and imag part, if possible.  */
2645  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2646	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2647						    * BITS_PER_UNIT),
2648						   (class == MODE_COMPLEX_INT
2649						    ? MODE_INT : MODE_FLOAT),
2650						   0))
2651	   && (mov_optab->handlers[(int) submode].insn_code
2652	       != CODE_FOR_nothing))
2653    {
2654      /* Don't split destination if it is a stack push.  */
2655      int stack = push_operand (x, GET_MODE (x));
2656
2657      /* If this is a stack push, push the highpart first, so it
2658	 will be in the argument order.
2659
2660	 In that case, change_address is used only to convert
2661	 the mode, not to change the address.  */
2662      if (stack)
2663	{
2664	  /* Note that the real part always precedes the imag part in memory
2665	     regardless of machine's endianness.  */
2666#ifdef STACK_GROWS_DOWNWARD
2667	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2668		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2669		      gen_imagpart (submode, y)));
2670	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2671		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2672		      gen_realpart (submode, y)));
2673#else
2674	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2675		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2676		      gen_realpart (submode, y)));
2677	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2678		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2679		      gen_imagpart (submode, y)));
2680#endif
2681	}
2682      else
2683	{
2684	  rtx realpart_x, realpart_y;
2685	  rtx imagpart_x, imagpart_y;
2686
2687	  /* If this is a complex value with each part being smaller than a
2688	     word, the usual calling sequence will likely pack the pieces into
2689	     a single register.  Unfortunately, SUBREG of hard registers only
2690	     deals in terms of words, so we have a problem converting input
2691	     arguments to the CONCAT of two registers that is used elsewhere
2692	     for complex values.  If this is before reload, we can copy it into
2693	     memory and reload.  FIXME, we should see about using extract and
2694	     insert on integer registers, but complex short and complex char
2695	     variables should be rarely used.  */
2696	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2697	      && (reload_in_progress | reload_completed) == 0)
2698	    {
2699	      int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2700	      int packed_src_p  = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2701
2702	      if (packed_dest_p || packed_src_p)
2703		{
2704		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2705					       ? MODE_FLOAT : MODE_INT);
2706
2707		  enum machine_mode reg_mode =
2708		    mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2709
2710		  if (reg_mode != BLKmode)
2711		    {
2712		      rtx mem = assign_stack_temp (reg_mode,
2713						   GET_MODE_SIZE (mode), 0);
2714
2715		      rtx cmem = change_address (mem, mode, NULL_RTX);
2716
2717		      current_function_cannot_inline
2718			= "function using short complex types cannot be inline";
2719
2720		      if (packed_dest_p)
2721			{
2722			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2723			  emit_move_insn_1 (cmem, y);
2724			  return emit_move_insn_1 (sreg, mem);
2725			}
2726		      else
2727			{
2728			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2729			  emit_move_insn_1 (mem, sreg);
2730			  return emit_move_insn_1 (x, cmem);
2731			}
2732		    }
2733		}
2734	    }
2735
2736	  realpart_x = gen_realpart (submode, x);
2737	  realpart_y = gen_realpart (submode, y);
2738	  imagpart_x = gen_imagpart (submode, x);
2739	  imagpart_y = gen_imagpart (submode, y);
2740
2741	  /* Show the output dies here.  This is necessary for SUBREGs
2742	     of pseudos since we cannot track their lifetimes correctly;
2743	     hard regs shouldn't appear here except as return values.
2744	     We never want to emit such a clobber after reload.  */
2745	  if (x != y
2746	      && ! (reload_in_progress || reload_completed)
2747	      && (GET_CODE (realpart_x) == SUBREG
2748		  || GET_CODE (imagpart_x) == SUBREG))
2749	    {
2750	      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2751	    }
2752
2753	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2754		     (realpart_x, realpart_y));
2755	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2756		     (imagpart_x, imagpart_y));
2757	}
2758
2759      return get_last_insn ();
2760    }
2761
2762  /* This will handle any multi-word mode that lacks a move_insn pattern.
2763     However, you will get better code if you define such patterns,
2764     even if they must turn into multiple assembler instructions.  */
2765  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2766    {
2767      rtx last_insn = 0;
2768      rtx seq;
2769      int need_clobber;
2770
2771#ifdef PUSH_ROUNDING
2772
2773      /* If X is a push on the stack, do the push now and replace
2774	 X with a reference to the stack pointer.  */
2775      if (push_operand (x, GET_MODE (x)))
2776	{
2777	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2778	  x = change_address (x, VOIDmode, stack_pointer_rtx);
2779	}
2780#endif
2781
2782      start_sequence ();
2783
2784      need_clobber = 0;
2785      for (i = 0;
2786	   i < (GET_MODE_SIZE (mode)  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2787	   i++)
2788	{
2789	  rtx xpart = operand_subword (x, i, 1, mode);
2790	  rtx ypart = operand_subword (y, i, 1, mode);
2791
2792	  /* If we can't get a part of Y, put Y into memory if it is a
2793	     constant.  Otherwise, force it into a register.  If we still
2794	     can't get a part of Y, abort.  */
2795	  if (ypart == 0 && CONSTANT_P (y))
2796	    {
2797	      y = force_const_mem (mode, y);
2798	      ypart = operand_subword (y, i, 1, mode);
2799	    }
2800	  else if (ypart == 0)
2801	    ypart = operand_subword_force (y, i, mode);
2802
2803	  if (xpart == 0 || ypart == 0)
2804	    abort ();
2805
2806	  need_clobber |= (GET_CODE (xpart) == SUBREG);
2807
2808	  last_insn = emit_move_insn (xpart, ypart);
2809	}
2810
2811      seq = gen_sequence ();
2812      end_sequence ();
2813
2814      /* Show the output dies here.  This is necessary for SUBREGs
2815	 of pseudos since we cannot track their lifetimes correctly;
2816	 hard regs shouldn't appear here except as return values.
2817	 We never want to emit such a clobber after reload.  */
2818      if (x != y
2819	  && ! (reload_in_progress || reload_completed)
2820	  && need_clobber != 0)
2821	{
2822	  emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2823	}
2824
2825      emit_insn (seq);
2826
2827      return last_insn;
2828    }
2829  else
2830    abort ();
2831}
2832
2833/* Pushing data onto the stack.  */
2834
2835/* Push a block of length SIZE (perhaps variable)
2836   and return an rtx to address the beginning of the block.
2837   Note that it is not possible for the value returned to be a QUEUED.
2838   The value may be virtual_outgoing_args_rtx.
2839
2840   EXTRA is the number of bytes of padding to push in addition to SIZE.
2841   BELOW nonzero means this padding comes at low addresses;
2842   otherwise, the padding comes at high addresses.  */
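
/* For example, push_block (GEN_INT (16), 0, 0) makes room for 16 bytes
   on the stack and returns an rtx addressing the start of that block.  */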
2843
2844rtx
2845push_block (size, extra, below)
2846     rtx size;
2847     int extra, below;
2848{
2849  register rtx temp;
2850
2851  size = convert_modes (Pmode, ptr_mode, size, 1);
2852  if (CONSTANT_P (size))
2853    anti_adjust_stack (plus_constant (size, extra));
2854  else if (GET_CODE (size) == REG && extra == 0)
2855    anti_adjust_stack (size);
2856  else
2857    {
2858      rtx temp = copy_to_mode_reg (Pmode, size);
2859      if (extra != 0)
2860	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2861			     temp, 0, OPTAB_LIB_WIDEN);
2862      anti_adjust_stack (temp);
2863    }
2864
2865#if defined (STACK_GROWS_DOWNWARD) \
2866    || (defined (ARGS_GROW_DOWNWARD) \
2867	&& !defined (ACCUMULATE_OUTGOING_ARGS))
2868
2869  /* Return the lowest stack address when STACK or ARGS grow downward and
2870     we are not accumulating outgoing arguments (the c4x port uses such
2871     conventions).  */
2872  temp = virtual_outgoing_args_rtx;
2873  if (extra != 0 && below)
2874    temp = plus_constant (temp, extra);
2875#else
2876  if (GET_CODE (size) == CONST_INT)
2877    temp = plus_constant (virtual_outgoing_args_rtx,
2878			  - INTVAL (size) - (below ? 0 : extra));
2879  else if (extra != 0 && !below)
2880    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2881		    negate_rtx (Pmode, plus_constant (size, extra)));
2882  else
2883    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2884		    negate_rtx (Pmode, size));
2885#endif
2886
2887  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2888}
2889
2890rtx
2891gen_push_operand ()
2892{
2893  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2894}
2895
2896/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2897   block of SIZE bytes.  */
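
/* With a post-modify push the stack pointer has already moved past the
   data, so the block begins at sp + SIZE when pushes post-decrement and
   at sp - SIZE when they post-increment; with a pre-modify push the
   stack pointer itself addresses the block.  */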
2898
2899static rtx
2900get_push_address (size)
2901     int size;
2902{
2903  register rtx temp;
2904
2905  if (STACK_PUSH_CODE == POST_DEC)
2906    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2907  else if (STACK_PUSH_CODE == POST_INC)
2908    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2909  else
2910    temp = stack_pointer_rtx;
2911
2912  return copy_to_reg (temp);
2913}
2914
2915/* Generate code to push X onto the stack, assuming it has mode MODE and
2916   type TYPE.
2917   MODE is redundant except when X is a CONST_INT (since they don't
2918   carry mode info).
2919   SIZE is an rtx for the size of data to be copied (in bytes),
2920   needed only if X is BLKmode.
2921
2922   ALIGN (in bytes) is maximum alignment we can assume.
2923
2924   If PARTIAL and REG are both nonzero, then copy that many of the first
2925   words of X into registers starting with REG, and push the rest of X.
2926   The amount of space pushed is decreased by PARTIAL words,
2927   rounded *down* to a multiple of PARM_BOUNDARY.
2928   REG must be a hard register in this case.
2929   If REG is zero but PARTIAL is not, take all other actions for an
2930   argument partially in registers, but do not actually load any
2931   registers.
2932
2933   EXTRA is the amount in bytes of extra space to leave next to this arg.
2934   This is ignored if an argument block has already been allocated.
2935
2936   On a machine that lacks real push insns, ARGS_ADDR is the address of
2937   the bottom of the argument block for this call.  We use indexing off there
2938   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2939   argument block has not been preallocated.
2940
2941   ARGS_SO_FAR is the size of args previously pushed for this call.
2942
2943   REG_PARM_STACK_SPACE is nonzero if functions require stack space
2944   for arguments passed in registers.  If nonzero, it will be the number
2945   of bytes required.  */
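
/* The body below distinguishes three cases: a BLKmode block copied onto
   the stack, a scalar passed partly in registers and partly on the stack
   (PARTIAL > 0), and the common case of a simple scalar push.  */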
2946
2947void
2948emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2949		args_addr, args_so_far, reg_parm_stack_space)
2950     register rtx x;
2951     enum machine_mode mode;
2952     tree type;
2953     rtx size;
2954     int align;
2955     int partial;
2956     rtx reg;
2957     int extra;
2958     rtx args_addr;
2959     rtx args_so_far;
2960     int reg_parm_stack_space;
2961{
2962  rtx xinner;
2963  enum direction stack_direction
2964#ifdef STACK_GROWS_DOWNWARD
2965    = downward;
2966#else
2967    = upward;
2968#endif
2969
2970  /* Decide where to pad the argument: `downward' for below,
2971     `upward' for above, or `none' for don't pad it.
2972     Default is below for small data on big-endian machines; else above.  */
2973  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2974
2975  /* Invert direction if stack is post-update.  */
2976  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2977    if (where_pad != none)
2978      where_pad = (where_pad == downward ? upward : downward);
2979
2980  xinner = x = protect_from_queue (x, 0);
2981
2982  if (mode == BLKmode)
2983    {
2984      /* Copy a block into the stack, entirely or partially.  */
2985
2986      register rtx temp;
2987      int used = partial * UNITS_PER_WORD;
2988      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2989      int skip;
2990
2991      if (size == 0)
2992	abort ();
2993
2994      used -= offset;
2995
2996      /* USED is now the # of bytes we need not copy to the stack
2997	 because registers will take care of them.  */
2998
2999      if (partial != 0)
3000	xinner = change_address (xinner, BLKmode,
3001				 plus_constant (XEXP (xinner, 0), used));
3002
3003      /* If the partial register-part of the arg counts in its stack size,
3004	 skip the part of stack space corresponding to the registers.
3005	 Otherwise, start copying to the beginning of the stack space,
3006	 by setting SKIP to 0.  */
3007      skip = (reg_parm_stack_space == 0) ? 0 : used;
3008
3009#ifdef PUSH_ROUNDING
3010      /* Do it with several push insns if that doesn't take lots of insns
3011	 and if there is no difficulty with push insns that skip bytes
3012	 on the stack for alignment purposes.  */
3013      if (args_addr == 0
3014	  && GET_CODE (size) == CONST_INT
3015	  && skip == 0
3016	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3017	  /* Here we avoid the case of a structure whose weak alignment
3018	     forces many pushes of a small amount of data,
3019	     and such small pushes do rounding that causes trouble.  */
3020	  && ((! SLOW_UNALIGNED_ACCESS)
3021	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
3022	      || PUSH_ROUNDING (align) == align)
3023	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3024	{
3025	  /* Push padding now if padding above and stack grows down,
3026	     or if padding below and stack grows up.
3027	     But if space already allocated, this has already been done.  */
3028	  if (extra && args_addr == 0
3029	      && where_pad != none && where_pad != stack_direction)
3030	    anti_adjust_stack (GEN_INT (extra));
3031
3032	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3033			  INTVAL (size) - used, align);
3034
3035	  if (current_function_check_memory_usage && ! in_check_memory_usage)
3036	    {
3037	      rtx temp;
3038
3039	      in_check_memory_usage = 1;
3040	      temp = get_push_address (INTVAL (size) - used);
3041	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3042		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3043				   temp, Pmode,
3044				   XEXP (xinner, 0), Pmode,
3045				   GEN_INT (INTVAL (size) - used),
3046				   TYPE_MODE (sizetype));
3047	      else
3048		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3049				   temp, Pmode,
3050				   GEN_INT (INTVAL (size) - used),
3051				   TYPE_MODE (sizetype),
3052				   GEN_INT (MEMORY_USE_RW),
3053				   TYPE_MODE (integer_type_node));
3054	      in_check_memory_usage = 0;
3055	    }
3056	}
3057      else
3058#endif /* PUSH_ROUNDING */
3059	{
3060	  /* Otherwise make space on the stack and copy the data
3061	     to the address of that space.  */
3062
3063	  /* Deduct words put into registers from the size we must copy.  */
3064	  if (partial != 0)
3065	    {
3066	      if (GET_CODE (size) == CONST_INT)
3067		size = GEN_INT (INTVAL (size) - used);
3068	      else
3069		size = expand_binop (GET_MODE (size), sub_optab, size,
3070				     GEN_INT (used), NULL_RTX, 0,
3071				     OPTAB_LIB_WIDEN);
3072	    }
3073
3074	  /* Get the address of the stack space.
3075	     In this case, we do not deal with EXTRA separately.
3076	     A single stack adjust will do.  */
3077	  if (! args_addr)
3078	    {
3079	      temp = push_block (size, extra, where_pad == downward);
3080	      extra = 0;
3081	    }
3082	  else if (GET_CODE (args_so_far) == CONST_INT)
3083	    temp = memory_address (BLKmode,
3084				   plus_constant (args_addr,
3085						  skip + INTVAL (args_so_far)));
3086	  else
3087	    temp = memory_address (BLKmode,
3088				   plus_constant (gen_rtx_PLUS (Pmode,
3089								args_addr,
3090								args_so_far),
3091						  skip));
3092	  if (current_function_check_memory_usage && ! in_check_memory_usage)
3093	    {
3094	      rtx target;
3095
3096	      in_check_memory_usage = 1;
3097	      target = copy_to_reg (temp);
3098	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3099		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3100				   target, Pmode,
3101				   XEXP (xinner, 0), Pmode,
3102				   size, TYPE_MODE (sizetype));
3103	      else
3104	        emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3105				   target, Pmode,
3106			 	   size, TYPE_MODE (sizetype),
3107				   GEN_INT (MEMORY_USE_RW),
3108				   TYPE_MODE (integer_type_node));
3109	      in_check_memory_usage = 0;
3110	    }
3111
3112	  /* TEMP is the address of the block.  Copy the data there.  */
3113	  if (GET_CODE (size) == CONST_INT
3114	      && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3115	    {
3116	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3117			      INTVAL (size), align);
3118	      goto ret;
3119	    }
3120	  else
3121	    {
3122	      rtx opalign = GEN_INT (align);
3123	      enum machine_mode mode;
3124	      rtx target = gen_rtx_MEM (BLKmode, temp);
3125
3126	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3127		   mode != VOIDmode;
3128		   mode = GET_MODE_WIDER_MODE (mode))
3129		{
3130		  enum insn_code code = movstr_optab[(int) mode];
3131
3132		  if (code != CODE_FOR_nothing
3133		      && ((GET_CODE (size) == CONST_INT
3134			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3135			       <= (GET_MODE_MASK (mode) >> 1)))
3136			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3137		      && (insn_operand_predicate[(int) code][0] == 0
3138			  || ((*insn_operand_predicate[(int) code][0])
3139			      (target, BLKmode)))
3140		      && (insn_operand_predicate[(int) code][1] == 0
3141			  || ((*insn_operand_predicate[(int) code][1])
3142			      (xinner, BLKmode)))
3143		      && (insn_operand_predicate[(int) code][3] == 0
3144			  || ((*insn_operand_predicate[(int) code][3])
3145			      (opalign, VOIDmode))))
3146		    {
3147		      rtx op2 = convert_to_mode (mode, size, 1);
3148		      rtx last = get_last_insn ();
3149		      rtx pat;
3150
3151		      if (insn_operand_predicate[(int) code][2] != 0
3152			  && ! ((*insn_operand_predicate[(int) code][2])
3153				(op2, mode)))
3154			op2 = copy_to_mode_reg (mode, op2);
3155
3156		      pat = GEN_FCN ((int) code) (target, xinner,
3157						  op2, opalign);
3158		      if (pat)
3159			{
3160			  emit_insn (pat);
3161			  goto ret;
3162			}
3163		      else
3164			delete_insns_since (last);
3165		    }
3166		}
3167	    }
3168
3169#ifndef ACCUMULATE_OUTGOING_ARGS
3170	  /* If the source is referenced relative to the stack pointer,
3171	     copy it to another register to stabilize it.  We do not need
3172	     to do this if we know that we won't be changing sp.  */
3173
3174	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3175	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3176	    temp = copy_to_reg (temp);
3177#endif
3178
3179	  /* Make inhibit_defer_pop nonzero around the library call
3180	     to force it to pop the bcopy-arguments right away.  */
3181	  NO_DEFER_POP;
3182#ifdef TARGET_MEM_FUNCTIONS
3183	  emit_library_call (memcpy_libfunc, 0,
3184			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3185			     convert_to_mode (TYPE_MODE (sizetype),
3186					      size, TREE_UNSIGNED (sizetype)),
3187			     TYPE_MODE (sizetype));
3188#else
3189	  emit_library_call (bcopy_libfunc, 0,
3190			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3191			     convert_to_mode (TYPE_MODE (integer_type_node),
3192					      size,
3193					      TREE_UNSIGNED (integer_type_node)),
3194			     TYPE_MODE (integer_type_node));
3195#endif
3196	  OK_DEFER_POP;
3197	}
3198    }
3199  else if (partial > 0)
3200    {
3201      /* Scalar partly in registers.  */
3202
3203      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3204      int i;
3205      int not_stack;
3206      /* # words of start of argument
3207	 that we must make space for but need not store.  */
3208      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3209      int args_offset = INTVAL (args_so_far);
3210      int skip;
3211
3212      /* Push padding now if padding above and stack grows down,
3213	 or if padding below and stack grows up.
3214	 But if space already allocated, this has already been done.  */
3215      if (extra && args_addr == 0
3216	  && where_pad != none && where_pad != stack_direction)
3217	anti_adjust_stack (GEN_INT (extra));
3218
3219      /* If we make space by pushing it, we might as well push
3220	 the real data.  Otherwise, we can leave OFFSET nonzero
3221	 and leave the space uninitialized.  */
3222      if (args_addr == 0)
3223	offset = 0;
3224
3225      /* Now NOT_STACK gets the number of words that we don't need to
3226	 allocate on the stack.  */
3227      not_stack = partial - offset;
3228
3229      /* If the partial register-part of the arg counts in its stack size,
3230	 skip the part of stack space corresponding to the registers.
3231	 Otherwise, start copying to the beginning of the stack space,
3232	 by setting SKIP to 0.  */
3233      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3234
3235      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3236	x = validize_mem (force_const_mem (mode, x));
3237
3238      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3239	 SUBREGs of such registers are not allowed.  */
3240      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3241	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3242	x = copy_to_reg (x);
3243
3244      /* Loop over all the words allocated on the stack for this arg.  */
3245      /* We can do it by words, because any scalar bigger than a word
3246	 has a size that is a multiple of a word.  */
3247#ifndef PUSH_ARGS_REVERSED
3248      for (i = not_stack; i < size; i++)
3249#else
3250      for (i = size - 1; i >= not_stack; i--)
3251#endif
3252	if (i >= not_stack + offset)
3253	  emit_push_insn (operand_subword_force (x, i, mode),
3254			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3255			  0, args_addr,
3256			  GEN_INT (args_offset + ((i - not_stack + skip)
3257						  * UNITS_PER_WORD)),
3258			  reg_parm_stack_space);
3259    }
3260  else
3261    {
3262      rtx addr;
3263      rtx target = NULL_RTX;
3264
3265      /* Push padding now if padding above and stack grows down,
3266	 or if padding below and stack grows up.
3267	 But if the space is already allocated, this has already been done.  */
3268      if (extra && args_addr == 0
3269	  && where_pad != none && where_pad != stack_direction)
3270	anti_adjust_stack (GEN_INT (extra));
3271
3272#ifdef PUSH_ROUNDING
3273      if (args_addr == 0)
3274	addr = gen_push_operand ();
3275      else
3276#endif
3277	{
3278	  if (GET_CODE (args_so_far) == CONST_INT)
3279	    addr
3280	      = memory_address (mode,
3281				plus_constant (args_addr,
3282					       INTVAL (args_so_far)));
3283          else
3284	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3285						       args_so_far));
3286	  target = addr;
3287	}
3288
3289      emit_move_insn (gen_rtx_MEM (mode, addr), x);
3290
3291      if (current_function_check_memory_usage && ! in_check_memory_usage)
3292	{
3293	  in_check_memory_usage = 1;
3294	  if (target == 0)
3295	    target = get_push_address (GET_MODE_SIZE (mode));
3296
3297	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3298	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3299			       target, Pmode,
3300			       XEXP (x, 0), Pmode,
3301			       GEN_INT (GET_MODE_SIZE (mode)),
3302			       TYPE_MODE (sizetype));
3303	  else
3304	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3305			       target, Pmode,
3306			       GEN_INT (GET_MODE_SIZE (mode)),
3307			       TYPE_MODE (sizetype),
3308			       GEN_INT (MEMORY_USE_RW),
3309			       TYPE_MODE (integer_type_node));
3310	  in_check_memory_usage = 0;
3311	}
3312    }
3313
3314 ret:
3315  /* If part should go in registers, copy that part
3316     into the appropriate registers.  Do this now, at the end,
3317     since mem-to-mem copies above may do function calls.  */
3318  if (partial > 0 && reg != 0)
3319    {
3320      /* Handle calls that pass values in multiple non-contiguous locations.
3321	 The Irix 6 ABI has examples of this.  */
3322      if (GET_CODE (reg) == PARALLEL)
3323	emit_group_load (reg, x, -1, align);  /* ??? size? */
3324      else
3325	move_block_to_reg (REGNO (reg), x, partial, mode);
3326    }
3327
3328  if (extra && args_addr == 0 && where_pad == stack_direction)
3329    anti_adjust_stack (GEN_INT (extra));
3330}
3331
3332/* Expand an assignment that stores the value of FROM into TO.
3333   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3334   (This may contain a QUEUED rtx;
3335   if the value is constant, this rtx is a constant.)
3336   Otherwise, the returned value is NULL_RTX.
3337
3338   SUGGEST_REG is no longer actually used.
3339   It used to mean, copy the value through a register
3340   and return that register, if that is possible.
3341   We now use WANT_VALUE to decide whether to do this.  */
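/* For example, in a chained assignment such as

       a = b = c;

   the inner assignment B = C is expanded with WANT_VALUE nonzero so
   that its value can in turn be stored into A.  */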
3342
3343rtx
3344expand_assignment (to, from, want_value, suggest_reg)
3345     tree to, from;
3346     int want_value;
3347     int suggest_reg;
3348{
3349  register rtx to_rtx = 0;
3350  rtx result;
3351
3352  /* Don't crash if the lhs of the assignment was erroneous.  */
3353
3354  if (TREE_CODE (to) == ERROR_MARK)
3355    {
3356      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3357      return want_value ? result : NULL_RTX;
3358    }
3359
3360  /* Assignment of a structure component needs special treatment
3361     if the structure component's rtx is not simply a MEM.
3362     Assignment of an array element at a constant index, and assignment of
3363     an array element in an unaligned packed structure field, has the same
3364     problem.  */
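  /* For example, given

	 struct { int f : 3; } s;
	 ... s.f = x ...

     the lhs S.F is a COMPONENT_REF of a bit-field, so the store must
     go through store_field below rather than a simple move.  */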
3365
3366  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3367      || TREE_CODE (to) == ARRAY_REF)
3368    {
3369      enum machine_mode mode1;
3370      int bitsize;
3371      int bitpos;
3372      tree offset;
3373      int unsignedp;
3374      int volatilep = 0;
3375      tree tem;
3376      int alignment;
3377
3378      push_temp_slots ();
3379      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3380				 &unsignedp, &volatilep, &alignment);
3381
3382      /* If we are going to use store_bit_field and extract_bit_field,
3383	 make sure to_rtx will be safe for multiple use.  */
3384
3385      if (mode1 == VOIDmode && want_value)
3386	tem = stabilize_reference (tem);
3387
3388      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3389      if (offset != 0)
3390	{
3391	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3392
3393	  if (GET_CODE (to_rtx) != MEM)
3394	    abort ();
3395
3396	  if (GET_MODE (offset_rtx) != ptr_mode)
3397	    {
3398#ifdef POINTERS_EXTEND_UNSIGNED
3399	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3400#else
3401	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3402#endif
3403	    }
3404
3405	  /* A constant address in TO_RTX can have VOIDmode; we must not
3406	     call force_reg in that case, so avoid it.  */
3407	  if (GET_CODE (to_rtx) == MEM
3408	      && GET_MODE (to_rtx) == BLKmode
3409	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3410	      && bitsize
3411	      && (bitpos % bitsize) == 0
3412	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3413	      && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3414	    {
3415	      rtx temp = change_address (to_rtx, mode1,
3416				         plus_constant (XEXP (to_rtx, 0),
3417						        (bitpos /
3418						         BITS_PER_UNIT)));
3419	      if (GET_CODE (XEXP (temp, 0)) == REG)
3420	        to_rtx = temp;
3421	      else
3422		to_rtx = change_address (to_rtx, mode1,
3423				         force_reg (GET_MODE (XEXP (temp, 0)),
3424						    XEXP (temp, 0)));
3425	      bitpos = 0;
3426	    }
3427
3428	  to_rtx = change_address (to_rtx, VOIDmode,
3429				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3430						 force_reg (ptr_mode, offset_rtx)));
3431	}
3432      if (volatilep)
3433	{
3434	  if (GET_CODE (to_rtx) == MEM)
3435	    {
3436	      /* When the offset is zero, to_rtx refers directly to the
3437		 structure we are storing into, and hence may be shared.
3438		 We must make a new MEM before setting the volatile bit.  */
3439	      if (offset == 0)
3440		to_rtx = copy_rtx (to_rtx);
3441
3442	      MEM_VOLATILE_P (to_rtx) = 1;
3443	    }
3444#if 0  /* This was turned off because, when a field is volatile
3445	  in an object which is not volatile, the object may be in a register,
3446	  and then we would abort here.  */
3447	  else
3448	    abort ();
3449#endif
3450	}
3451
3452      if (TREE_CODE (to) == COMPONENT_REF
3453	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3454	{
3455	  if (offset == 0)
3456	    to_rtx = copy_rtx (to_rtx);
3457
3458	  RTX_UNCHANGING_P (to_rtx) = 1;
3459	}
3460
3461      /* Check the access.  */
3462      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3463	{
3464	  rtx to_addr;
3465	  int size;
3466	  int best_mode_size;
3467	  enum machine_mode best_mode;
3468
3469	  best_mode = get_best_mode (bitsize, bitpos,
3470	  			     TYPE_ALIGN (TREE_TYPE (tem)),
3471	  			     mode1, volatilep);
3472	  if (best_mode == VOIDmode)
3473	    best_mode = QImode;
3474
3475	  best_mode_size = GET_MODE_BITSIZE (best_mode);
3476	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3477	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3478	  size *= GET_MODE_SIZE (best_mode);
3479
3480	  /* Check the access rights of the pointer.  */
3481	  if (size)
3482	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3483			       to_addr, Pmode,
3484			       GEN_INT (size), TYPE_MODE (sizetype),
3485			       GEN_INT (MEMORY_USE_WO),
3486			       TYPE_MODE (integer_type_node));
3487	}
3488
3489      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3490			    (want_value
3491			     /* Spurious cast makes HPUX compiler happy.  */
3492			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3493			     : VOIDmode),
3494			    unsignedp,
3495			    /* Required alignment of containing datum.  */
3496			    alignment,
3497			    int_size_in_bytes (TREE_TYPE (tem)),
3498			    get_alias_set (to));
3499      preserve_temp_slots (result);
3500      free_temp_slots ();
3501      pop_temp_slots ();
3502
3503      /* If the value is meaningful, convert RESULT to the proper mode.
3504	 Otherwise, return nothing.  */
3505      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3506					  TYPE_MODE (TREE_TYPE (from)),
3507					  result,
3508					  TREE_UNSIGNED (TREE_TYPE (to)))
3509	      : NULL_RTX);
3510    }
3511
3512  /* If the rhs is a function call and its value is not an aggregate,
3513     call the function before we start to compute the lhs.
3514     This is needed for correct code for cases such as
3515     val = setjmp (buf) on machines where a reference to VAL
3516     requires loading up part of an address in a separate insn.
3517
3518     Don't do this if TO is a VAR_DECL whose DECL_RTL is a REG since it might
3519     be a promoted variable where the zero- or sign-extension needs to be done.
3520     Handling this in the normal way is safe because no computation is done
3521     before the call.  */
3522  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3523      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3524      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3525    {
3526      rtx value;
3527
3528      push_temp_slots ();
3529      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3530      if (to_rtx == 0)
3531	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3532
3533      /* Handle calls that return values in multiple non-contiguous locations.
3534	 The Irix 6 ABI has examples of this.  */
3535      if (GET_CODE (to_rtx) == PARALLEL)
3536	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3537			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3538      else if (GET_MODE (to_rtx) == BLKmode)
3539	emit_block_move (to_rtx, value, expr_size (from),
3540			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3541      else
3542	{
3543#ifdef POINTERS_EXTEND_UNSIGNED
3544	  if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3545	     || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3546	    value = convert_memory_address (GET_MODE (to_rtx), value);
3547#endif
3548	  emit_move_insn (to_rtx, value);
3549	}
3550      preserve_temp_slots (to_rtx);
3551      free_temp_slots ();
3552      pop_temp_slots ();
3553      return want_value ? to_rtx : NULL_RTX;
3554    }
3555
3556  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3557     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3558
3559  if (to_rtx == 0)
3560    {
3561      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3562      if (GET_CODE (to_rtx) == MEM)
3563	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3564    }
3565
3566  /* Don't move directly into a return register.  */
3567  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3568    {
3569      rtx temp;
3570
3571      push_temp_slots ();
3572      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3573      emit_move_insn (to_rtx, temp);
3574      preserve_temp_slots (to_rtx);
3575      free_temp_slots ();
3576      pop_temp_slots ();
3577      return want_value ? to_rtx : NULL_RTX;
3578    }
3579
3580  /* In case we are returning the contents of an object which overlaps
3581     the place the value is being stored, use a safe function when copying
3582     a value through a pointer into a structure value return block.  */
3583  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3584      && current_function_returns_struct
3585      && !current_function_returns_pcc_struct)
3586    {
3587      rtx from_rtx, size;
3588
3589      push_temp_slots ();
3590      size = expr_size (from);
3591      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3592			      EXPAND_MEMORY_USE_DONT);
3593
3594      /* Copy the rights of the bitmap.  */
3595      if (current_function_check_memory_usage)
3596	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3597			   XEXP (to_rtx, 0), Pmode,
3598			   XEXP (from_rtx, 0), Pmode,
3599			   convert_to_mode (TYPE_MODE (sizetype),
3600					    size, TREE_UNSIGNED (sizetype)),
3601			   TYPE_MODE (sizetype));
3602
3603#ifdef TARGET_MEM_FUNCTIONS
3604      emit_library_call (memcpy_libfunc, 0,
3605			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3606			 XEXP (from_rtx, 0), Pmode,
3607			 convert_to_mode (TYPE_MODE (sizetype),
3608					  size, TREE_UNSIGNED (sizetype)),
3609			 TYPE_MODE (sizetype));
3610#else
3611      emit_library_call (bcopy_libfunc, 0,
3612			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3613			 XEXP (to_rtx, 0), Pmode,
3614			 convert_to_mode (TYPE_MODE (integer_type_node),
3615					  size, TREE_UNSIGNED (integer_type_node)),
3616			 TYPE_MODE (integer_type_node));
3617#endif
3618
3619      preserve_temp_slots (to_rtx);
3620      free_temp_slots ();
3621      pop_temp_slots ();
3622      return want_value ? to_rtx : NULL_RTX;
3623    }
3624
3625  /* Compute FROM and store the value in the rtx we got.  */
3626
3627  push_temp_slots ();
3628  result = store_expr (from, to_rtx, want_value);
3629  preserve_temp_slots (result);
3630  free_temp_slots ();
3631  pop_temp_slots ();
3632  return want_value ? result : NULL_RTX;
3633}
3634
3635/* Generate code for computing expression EXP,
3636   and storing the value into TARGET.
3637   TARGET may contain a QUEUED rtx.
3638
3639   If WANT_VALUE is nonzero, return a copy of the value
3640   not in TARGET, so that we can be sure to use the proper
3641   value in a containing expression even if TARGET has something
3642   else stored in it.  If possible, we copy the value through a pseudo
3643   and return that pseudo.  Or, if the value is constant, we try to
3644   return the constant.  In some cases, we return a pseudo
3645   copied *from* TARGET.
3646
3647   If the mode is BLKmode then we may return TARGET itself.
3648   It turns out that in BLKmode this doesn't cause a problem,
3649   because C has no operators that could combine two different
3650   assignments into the same BLKmode object with different values
3651   with no sequence point.  Will other languages need this to
3652   be more thorough?
3653
3654   If WANT_VALUE is 0, we return NULL, to make sure
3655   to catch quickly any cases where the caller uses the value
3656   and fails to set WANT_VALUE.  */
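/* For example, storing the comma expression (f (), x) into TARGET
   first expands f () for its side effects and then stores X into
   TARGET; this is the COMPOUND_EXPR case below.  */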
3657
3658rtx
3659store_expr (exp, target, want_value)
3660     register tree exp;
3661     register rtx target;
3662     int want_value;
3663{
3664  register rtx temp;
3665  int dont_return_target = 0;
3666
3667  if (TREE_CODE (exp) == COMPOUND_EXPR)
3668    {
3669      /* Perform first part of compound expression, then assign from second
3670	 part.  */
3671      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3672      emit_queue ();
3673      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3674    }
3675  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3676    {
3677      /* For conditional expression, get safe form of the target.  Then
3678	 test the condition, doing the appropriate assignment on either
3679	 side.  This avoids the creation of unnecessary temporaries.
3680	 For non-BLKmode, it is more efficient not to do this.  */
3681
3682      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3683
3684      emit_queue ();
3685      target = protect_from_queue (target, 1);
3686
3687      do_pending_stack_adjust ();
3688      NO_DEFER_POP;
3689      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3690      start_cleanup_deferral ();
3691      store_expr (TREE_OPERAND (exp, 1), target, 0);
3692      end_cleanup_deferral ();
3693      emit_queue ();
3694      emit_jump_insn (gen_jump (lab2));
3695      emit_barrier ();
3696      emit_label (lab1);
3697      start_cleanup_deferral ();
3698      store_expr (TREE_OPERAND (exp, 2), target, 0);
3699      end_cleanup_deferral ();
3700      emit_queue ();
3701      emit_label (lab2);
3702      OK_DEFER_POP;
3703
3704      return want_value ? target : NULL_RTX;
3705    }
3706  else if (queued_subexp_p (target))
3707    /* If target contains a postincrement, let's not risk
3708       using it as the place to generate the rhs.  */
3709    {
3710      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3711	{
3712	  /* Expand EXP into a new pseudo.  */
3713	  temp = gen_reg_rtx (GET_MODE (target));
3714	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3715	}
3716      else
3717	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3718
3719      /* If target is volatile, ANSI requires accessing the value
3720	 *from* the target, if it is accessed.  So make that happen.
3721	 In no case return the target itself.  */
3722      if (! MEM_VOLATILE_P (target) && want_value)
3723	dont_return_target = 1;
3724    }
3725  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3726	   && GET_MODE (target) != BLKmode)
3727    /* If target is in memory and caller wants value in a register instead,
3728       arrange that.  Pass TARGET as target for expand_expr so that,
3729       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3730       We know expand_expr will not use the target in that case.
3731       Don't do this if TARGET is volatile because we are supposed
3732       to write it and then read it.  */
3733    {
3734      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3735			  GET_MODE (target), 0);
3736      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3737	temp = copy_to_reg (temp);
3738      dont_return_target = 1;
3739    }
3740  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3741    /* If this is a scalar in a register that is stored in a wider mode
3742       than the declared mode, compute the result into its declared mode
3743       and then convert to the wider mode.  Our value is the computed
3744       expression.  */
3745    {
3746      /* If we don't want a value, we can do the conversion inside EXP,
3747	 which will often result in some optimizations.  Do the conversion
3748	 in two steps: first change the signedness, if needed, then
3749	 the extend.  But don't do this if the type of EXP is a subtype
3750	 of something else since then the conversion might involve
3751	 more than just converting modes.  */
3752      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3753	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
3754	{
3755	  if (TREE_UNSIGNED (TREE_TYPE (exp))
3756	      != SUBREG_PROMOTED_UNSIGNED_P (target))
3757	    exp
3758	      = convert
3759		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3760					  TREE_TYPE (exp)),
3761		 exp);
3762
3763	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3764					SUBREG_PROMOTED_UNSIGNED_P (target)),
3765			 exp);
3766	}
3767
3768      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3769
3770      /* If TEMP is a volatile MEM and we want a result value, make
3771	 the access now so it gets done only once.  Likewise if
3772	 it contains TARGET.  */
3773      if (GET_CODE (temp) == MEM && want_value
3774	  && (MEM_VOLATILE_P (temp)
3775	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3776	temp = copy_to_reg (temp);
3777
3778      /* If TEMP is a VOIDmode constant, use convert_modes to make
3779	 sure that we properly convert it.  */
3780      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3781	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3782			      TYPE_MODE (TREE_TYPE (exp)), temp,
3783			      SUBREG_PROMOTED_UNSIGNED_P (target));
3784
3785      convert_move (SUBREG_REG (target), temp,
3786		    SUBREG_PROMOTED_UNSIGNED_P (target));
3787      return want_value ? temp : NULL_RTX;
3788    }
3789  else
3790    {
3791      temp = expand_expr (exp, target, GET_MODE (target), 0);
3792      /* Return TARGET if it's a specified hardware register.
3793	 If TARGET is a volatile mem ref, either return TARGET
3794	 or return a reg copied *from* TARGET; ANSI requires this.
3795
3796	 Otherwise, if TEMP is not TARGET, return TEMP
3797	 if it is constant (for efficiency),
3798	 or if we really want the correct value.  */
3799      if (!(target && GET_CODE (target) == REG
3800	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
3801	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3802	  && ! rtx_equal_p (temp, target)
3803	  && (CONSTANT_P (temp) || want_value))
3804	dont_return_target = 1;
3805    }
3806
3807  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3808     the same as that of TARGET, adjust the constant.  This is needed, for
3809     example, in case it is a CONST_DOUBLE and we want only a word-sized
3810     value.  */
3811  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3812      && TREE_CODE (exp) != ERROR_MARK
3813      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3814    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3815			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3816
3817  if (current_function_check_memory_usage
3818      && GET_CODE (target) == MEM
3819      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3820    {
3821      if (GET_CODE (temp) == MEM)
3822        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3823			   XEXP (target, 0), Pmode,
3824			   XEXP (temp, 0), Pmode,
3825			   expr_size (exp), TYPE_MODE (sizetype));
3826      else
3827        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3828			   XEXP (target, 0), Pmode,
3829			   expr_size (exp), TYPE_MODE (sizetype),
3830			   GEN_INT (MEMORY_USE_WO),
3831			   TYPE_MODE (integer_type_node));
3832    }
3833
3834  /* If value was not generated in the target, store it there.
3835     Convert the value to TARGET's type first if necessary.  */
3836  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3837     one or both of them are volatile memory refs, we have to distinguish
3838     two cases:
3839     - expand_expr has used TARGET.  In this case, we must not generate
3840       another copy.  This can be detected by TARGET being equal according
3841       to ==.
3842     - expand_expr has not used TARGET - that means that the source just
3843       happens to have the same RTX form.  Since temp will have been created
3844       by expand_expr, it will compare unequal according to ==.
3845       We must generate a copy in this case, to reach the correct number
3846       of volatile memory references.  */
3847
3848  if ((! rtx_equal_p (temp, target)
3849       || (temp != target && (side_effects_p (temp)
3850			      || side_effects_p (target))))
3851      && TREE_CODE (exp) != ERROR_MARK)
3852    {
3853      target = protect_from_queue (target, 1);
3854      if (GET_MODE (temp) != GET_MODE (target)
3855	  && GET_MODE (temp) != VOIDmode)
3856	{
3857	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3858	  if (dont_return_target)
3859	    {
3860	      /* In this case, we will return TEMP,
3861		 so make sure it has the proper mode.
3862		 But don't forget to store the value into TARGET.  */
3863	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3864	      emit_move_insn (target, temp);
3865	    }
3866	  else
3867	    convert_move (target, temp, unsignedp);
3868	}
3869
3870      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3871	{
3872	  /* Handle copying a string constant into an array.
3873	     The string constant may be shorter than the array.
3874	     So copy just the string's actual length, and clear the rest.  */
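	  /* E.g. for

		 char buf[10] = "hi";

	     only the string's own bytes (in C, including its terminating
	     null) are copied, and the trailing bytes of BUF are cleared.  */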
3875	  rtx size;
3876	  rtx addr;
3877
3878	  /* Get the size of the data type of the string,
3879	     which is actually the size of the target.  */
3880	  size = expr_size (exp);
3881	  if (GET_CODE (size) == CONST_INT
3882	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
3883	    emit_block_move (target, temp, size,
3884			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3885	  else
3886	    {
3887	      /* Compute the size of the data to copy from the string.  */
3888	      tree copy_size
3889		= size_binop (MIN_EXPR,
3890			      make_tree (sizetype, size),
3891			      convert (sizetype,
3892				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3893	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3894					       VOIDmode, 0);
3895	      rtx label = 0;
3896
3897	      /* Copy that much.  */
3898	      emit_block_move (target, temp, copy_size_rtx,
3899			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3900
3901	      /* Figure out how much is left in TARGET that we have to clear.
3902		 Do all calculations in ptr_mode.  */
3903
3904	      addr = XEXP (target, 0);
3905	      addr = convert_modes (ptr_mode, Pmode, addr, 1);
3906
3907	      if (GET_CODE (copy_size_rtx) == CONST_INT)
3908		{
3909		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3910		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3911		}
3912	      else
3913		{
3914		  addr = force_reg (ptr_mode, addr);
3915		  addr = expand_binop (ptr_mode, add_optab, addr,
3916				       copy_size_rtx, NULL_RTX, 0,
3917				       OPTAB_LIB_WIDEN);
3918
3919		  size = expand_binop (ptr_mode, sub_optab, size,
3920				       copy_size_rtx, NULL_RTX, 0,
3921				       OPTAB_LIB_WIDEN);
3922
3923		  label = gen_label_rtx ();
3924		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3925					   GET_MODE (size), 0, 0, label);
3926		}
3927
3928	      if (size != const0_rtx)
3929		{
3930		  /* Be sure we can write on ADDR.  */
3931		  if (current_function_check_memory_usage)
3932		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3933				       addr, Pmode,
3934				       size, TYPE_MODE (sizetype),
3935 				       GEN_INT (MEMORY_USE_WO),
3936				       TYPE_MODE (integer_type_node));
3937#ifdef TARGET_MEM_FUNCTIONS
3938		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3939				     addr, ptr_mode,
3940				     const0_rtx, TYPE_MODE (integer_type_node),
3941				     convert_to_mode (TYPE_MODE (sizetype),
3942						      size,
3943						      TREE_UNSIGNED (sizetype)),
3944				     TYPE_MODE (sizetype));
3945#else
3946		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3947				     addr, ptr_mode,
3948				     convert_to_mode (TYPE_MODE (integer_type_node),
3949						      size,
3950						      TREE_UNSIGNED (integer_type_node)),
3951				     TYPE_MODE (integer_type_node));
3952#endif
3953		}
3954
3955	      if (label)
3956		emit_label (label);
3957	    }
3958	}
3959      /* Handle calls that return values in multiple non-contiguous locations.
3960	 The Irix 6 ABI has examples of this.  */
3961      else if (GET_CODE (target) == PARALLEL)
3962	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3963			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3964      else if (GET_MODE (temp) == BLKmode)
3965	emit_block_move (target, temp, expr_size (exp),
3966			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3967      else
3968	emit_move_insn (target, temp);
3969    }
3970
3971  /* If we don't want a value, return NULL_RTX.  */
3972  if (! want_value)
3973    return NULL_RTX;
3974
3975  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3976     ??? The latter test doesn't seem to make sense.  */
3977  else if (dont_return_target && GET_CODE (temp) != MEM)
3978    return temp;
3979
3980  /* Return TARGET itself if it is a hard register.  */
3981  else if (want_value && GET_MODE (target) != BLKmode
3982	   && ! (GET_CODE (target) == REG
3983		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3984    return copy_to_reg (target);
3985
3986  else
3987    return target;
3988}
3989
3990/* Return 1 if EXP just contains zeros.  */
3991
3992static int
3993is_zeros_p (exp)
3994     tree exp;
3995{
3996  tree elt;
3997
3998  switch (TREE_CODE (exp))
3999    {
4000    case CONVERT_EXPR:
4001    case NOP_EXPR:
4002    case NON_LVALUE_EXPR:
4003      return is_zeros_p (TREE_OPERAND (exp, 0));
4004
4005    case INTEGER_CST:
4006      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
4007
4008    case COMPLEX_CST:
4009      return
4010	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4011
4012    case REAL_CST:
4013      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4014
4015    case CONSTRUCTOR:
4016      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4017	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4018      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4019	if (! is_zeros_p (TREE_VALUE (elt)))
4020	  return 0;
4021
4022      return 1;
4023
4024    default:
4025      return 0;
4026    }
4027}
4028
4029/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
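/* For example, a CONSTRUCTOR with 16 elements of which 12 are zero
   satisfies 4 * zeros >= 3 * elts (48 >= 48) and so counts as
   mostly zero.  */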
4030
4031static int
4032mostly_zeros_p (exp)
4033     tree exp;
4034{
4035  if (TREE_CODE (exp) == CONSTRUCTOR)
4036    {
4037      int elts = 0, zeros = 0;
4038      tree elt = CONSTRUCTOR_ELTS (exp);
4039      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4040	{
4041	  /* If there are no ranges of true bits, it is all zero.  */
4042	  return elt == NULL_TREE;
4043	}
4044      for (; elt; elt = TREE_CHAIN (elt))
4045	{
4046	  /* We do not handle the case where the index is a RANGE_EXPR,
4047	     so the statistic will be somewhat inaccurate.
4048	     We do make a more accurate count in store_constructor itself,
4049	     and since this function is only used for nested array elements,
4050	     this should be close enough.  */
4051	  if (mostly_zeros_p (TREE_VALUE (elt)))
4052	    zeros++;
4053	  elts++;
4054	}
4055
4056      return 4 * zeros >= 3 * elts;
4057    }
4058
4059  return is_zeros_p (exp);
4060}
4061
4062/* Helper function for store_constructor.
4063   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4064   TYPE is the type of the CONSTRUCTOR, not the element type.
4065   CLEARED is as for store_constructor.
4066
4067   This provides a recursive shortcut back to store_constructor when it isn't
4068   necessary to go through store_field.  This is so that we can pass through
4069   the cleared field to let store_constructor know that we may not have to
4070   clear a substructure if the outer structure has already been cleared.  */
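/* For example, for

       struct { struct { int a, b; } in; int c; } s = { { 0, 0 }, 1 };

   store_constructor may clear all of S up front; passing CLEARED
   through here lets the recursive store_constructor call skip storing
   the already-zero fields of S.IN.  */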
4071
4072static void
4073store_constructor_field (target, bitsize, bitpos,
4074			 mode, exp, type, cleared)
4075     rtx target;
4076     int bitsize, bitpos;
4077     enum machine_mode mode;
4078     tree exp, type;
4079     int cleared;
4080{
4081  if (TREE_CODE (exp) == CONSTRUCTOR
4082      && bitpos % BITS_PER_UNIT == 0
4083      /* If we have a non-zero bitpos for a register target, then we just
4084	 let store_field do the bitfield handling.  This is unlikely to
4085	 generate unnecessary clear instructions anyway.  */
4086      && (bitpos == 0 || GET_CODE (target) == MEM))
4087    {
4088      if (bitpos != 0)
4089	target = change_address (target, VOIDmode,
4090				 plus_constant (XEXP (target, 0),
4091						bitpos / BITS_PER_UNIT));
4092      store_constructor (exp, target, cleared);
4093    }
4094  else
4095    store_field (target, bitsize, bitpos, mode, exp,
4096		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
4097		 int_size_in_bytes (type), 0);
4098}
4099
4100/* Store the value of constructor EXP into the rtx TARGET.
4101   TARGET is either a REG or a MEM.
4102   CLEARED is true if TARGET is known to have been zeroed.  */
4103
4104static void
4105store_constructor (exp, target, cleared)
4106     tree exp;
4107     rtx target;
4108     int cleared;
4109{
4110  tree type = TREE_TYPE (exp);
4111  rtx exp_size = expr_size (exp);
4112
4113  /* We know our target cannot conflict, since safe_from_p has been called.  */
4114#if 0
4115  /* Don't try copying piece by piece into a hard register
4116     since that is vulnerable to being clobbered by EXP.
4117     Instead, construct in a pseudo register and then copy it all.  */
4118  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4119    {
4120      rtx temp = gen_reg_rtx (GET_MODE (target));
4121      store_constructor (exp, temp, 0);
4122      emit_move_insn (target, temp);
4123      return;
4124    }
4125#endif
4126
4127  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4128      || TREE_CODE (type) == QUAL_UNION_TYPE)
4129    {
4130      register tree elt;
4131
4132      /* Inform later passes that the whole union value is dead.  */
4133      if (TREE_CODE (type) == UNION_TYPE
4134	  || TREE_CODE (type) == QUAL_UNION_TYPE)
4135	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4136
4137      /* If we are building a static constructor into a register,
4138	 set the initial value as zero so we can fold the value into
4139	 a constant.  But if more than one register is involved,
4140	 this probably loses.  */
4141      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4142	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4143	{
4144	  if (! cleared)
4145	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4146
4147	  cleared = 1;
4148	}
4149
4150      /* If the constructor has fewer fields than the structure
4151	 or if we are initializing the structure to mostly zeros,
4152	 clear the whole structure first.  */
4153      else if ((list_length (CONSTRUCTOR_ELTS (exp))
4154		!= list_length (TYPE_FIELDS (type)))
4155	       || mostly_zeros_p (exp))
4156	{
4157	  if (! cleared)
4158	    clear_storage (target, expr_size (exp),
4159			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4160
4161	  cleared = 1;
4162	}
4163      else
4164	/* Inform later passes that the old value is dead.  */
4165	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4166
4167      /* Store each element of the constructor into
4168	 the corresponding field of TARGET.  */
4169
4170      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4171	{
4172	  register tree field = TREE_PURPOSE (elt);
4173	  tree value = TREE_VALUE (elt);
4174	  register enum machine_mode mode;
4175	  int bitsize;
4176	  int bitpos = 0;
4177	  int unsignedp;
4178	  tree pos, constant = 0, offset = 0;
4179	  rtx to_rtx = target;
4180
4181	  /* Just ignore missing fields.
4182	     We cleared the whole structure, above,
4183	     if any fields are missing.  */
4184	  if (field == 0)
4185	    continue;
4186
4187	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
4188	    continue;
4189
4190	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4191	  unsignedp = TREE_UNSIGNED (field);
4192	  mode = DECL_MODE (field);
4193	  if (DECL_BIT_FIELD (field))
4194	    mode = VOIDmode;
4195
4196	  pos = DECL_FIELD_BITPOS (field);
4197	  if (TREE_CODE (pos) == INTEGER_CST)
4198	    constant = pos;
4199	  else if (TREE_CODE (pos) == PLUS_EXPR
4200		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4201	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4202	  else
4203	    offset = pos;
4204
4205	  if (constant)
4206	    bitpos = TREE_INT_CST_LOW (constant);
4207
4208	  if (offset)
4209	    {
4210	      rtx offset_rtx;
4211
4212	      if (contains_placeholder_p (offset))
4213		offset = build (WITH_RECORD_EXPR, sizetype,
4214				offset, make_tree (TREE_TYPE (exp), target));
4215
4216	      offset = size_binop (FLOOR_DIV_EXPR, offset,
4217				   size_int (BITS_PER_UNIT));
4218
4219	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4220	      if (GET_CODE (to_rtx) != MEM)
4221		abort ();
4222
4223              if (GET_MODE (offset_rtx) != ptr_mode)
4224                {
4225#ifdef POINTERS_EXTEND_UNSIGNED
4226                  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4227#else
4228                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4229#endif
4230                }
4231
4232	      to_rtx
4233		= change_address (to_rtx, VOIDmode,
4234				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4235					   force_reg (ptr_mode, offset_rtx)));
4236	    }
4237	  if (TREE_READONLY (field))
4238	    {
4239	      if (GET_CODE (to_rtx) == MEM)
4240		to_rtx = copy_rtx (to_rtx);
4241
4242	      RTX_UNCHANGING_P (to_rtx) = 1;
4243	    }
4244
4245#ifdef WORD_REGISTER_OPERATIONS
4246	  /* If this initializes a field that is smaller than a word, at the
4247	     start of a word, try to widen it to a full word.
4248	     This special case allows us to output C++ member function
4249	     initializations in a form that the optimizers can understand.  */
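	  /* E.g. an 8-bit field at bit 0 of a word-sized register becomes
	     a full word_mode store; on a big-endian target the constant is
	     first shifted left by BITS_PER_WORD - 8 bits so that it lands
	     in the field's position within the word.  */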
4250	  if (constant
4251	      && GET_CODE (target) == REG
4252	      && bitsize < BITS_PER_WORD
4253	      && bitpos % BITS_PER_WORD == 0
4254	      && GET_MODE_CLASS (mode) == MODE_INT
4255	      && TREE_CODE (value) == INTEGER_CST
4256	      && GET_CODE (exp_size) == CONST_INT
4257	      && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4258	    {
4259	      tree type = TREE_TYPE (value);
4260	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4261		{
4262		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4263		  value = convert (type, value);
4264		}
4265	      if (BYTES_BIG_ENDIAN)
4266		value
4267		  = fold (build (LSHIFT_EXPR, type, value,
4268				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4269	      bitsize = BITS_PER_WORD;
4270	      mode = word_mode;
4271	    }
4272#endif
4273	  store_constructor_field (to_rtx, bitsize, bitpos,
4274				   mode, value, type, cleared);
4275	}
4276    }
4277  else if (TREE_CODE (type) == ARRAY_TYPE)
4278    {
4279      register tree elt;
4280      register int i;
4281      int need_to_clear;
4282      tree domain = TYPE_DOMAIN (type);
4283      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4284      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4285      tree elttype = TREE_TYPE (type);
4286
4287      /* If the constructor has fewer elements than the array,
4288         clear the whole array first.  Similarly if this is a
4289         static constructor of a non-BLKmode object.  */
4290      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4291	need_to_clear = 1;
4292      else
4293	{
4294	  HOST_WIDE_INT count = 0, zero_count = 0;
4295	  need_to_clear = 0;
4296	  /* This loop is a more accurate version of the loop in
4297	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4298	     It is also needed to check for missing elements.  */
4299	  for (elt = CONSTRUCTOR_ELTS (exp);
4300	       elt != NULL_TREE;
4301	       elt = TREE_CHAIN (elt))
4302	    {
4303	      tree index = TREE_PURPOSE (elt);
4304	      HOST_WIDE_INT this_node_count;
4305	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4306		{
4307		  tree lo_index = TREE_OPERAND (index, 0);
4308		  tree hi_index = TREE_OPERAND (index, 1);
4309		  if (TREE_CODE (lo_index) != INTEGER_CST
4310		      || TREE_CODE (hi_index) != INTEGER_CST)
4311		    {
4312		      need_to_clear = 1;
4313		      break;
4314		    }
4315		  this_node_count = TREE_INT_CST_LOW (hi_index)
4316		    - TREE_INT_CST_LOW (lo_index) + 1;
4317		}
4318	      else
4319		this_node_count = 1;
4320	      count += this_node_count;
4321	      if (mostly_zeros_p (TREE_VALUE (elt)))
4322		zero_count += this_node_count;
4323	    }
4324	  /* Clear the entire array first if there are any missing elements,
4325	     or if the incidence of zero elements is >= 75%.  */
4326	  if (count < maxelt - minelt + 1
4327	      || 4 * zero_count >= 3 * count)
4328	    need_to_clear = 1;
4329	}
4330      if (need_to_clear)
4331	{
4332	  if (! cleared)
4333	    clear_storage (target, expr_size (exp),
4334			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4335	  cleared = 1;
4336	}
4337      else
4338	/* Inform later passes that the old value is dead.  */
4339	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4340
4341      /* Store each element of the constructor into
4342	 the corresponding element of TARGET, determined
4343	 by counting the elements.  */
4344      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4345	   elt;
4346	   elt = TREE_CHAIN (elt), i++)
4347	{
4348	  register enum machine_mode mode;
4349	  int bitsize;
4350	  int bitpos;
4351	  int unsignedp;
4352	  tree value = TREE_VALUE (elt);
4353	  tree index = TREE_PURPOSE (elt);
4354	  rtx xtarget = target;
4355
4356	  if (cleared && is_zeros_p (value))
4357	    continue;
4358
4359	  mode = TYPE_MODE (elttype);
4360	  bitsize = GET_MODE_BITSIZE (mode);
4361	  unsignedp = TREE_UNSIGNED (elttype);
4362
4363	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4364	    {
4365	      tree lo_index = TREE_OPERAND (index, 0);
4366	      tree hi_index = TREE_OPERAND (index, 1);
4367	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4368	      struct nesting *loop;
4369	      HOST_WIDE_INT lo, hi, count;
4370	      tree position;
4371
4372	      /* If the range is constant and "small", unroll the loop.  */
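	      /* "Small" means the target is not in memory, or there are at
		 most two copies, or the unrolled stores would cover at most
		 40 * 8 bits (40 bytes) worth of elements.  */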
4373	      if (TREE_CODE (lo_index) == INTEGER_CST
4374		  && TREE_CODE (hi_index) == INTEGER_CST
4375		  && (lo = TREE_INT_CST_LOW (lo_index),
4376		      hi = TREE_INT_CST_LOW (hi_index),
4377		      count = hi - lo + 1,
4378		      (GET_CODE (target) != MEM
4379		       || count <= 2
4380		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4381			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4382			   <= 40 * 8))))
4383		{
4384		  lo -= minelt;  hi -= minelt;
4385		  for (; lo <= hi; lo++)
4386		    {
4387		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4388		      store_constructor_field (target, bitsize, bitpos,
4389					       mode, value, type, cleared);
4390		    }
4391		}
4392	      else
4393		{
4394		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4395		  loop_top = gen_label_rtx ();
4396		  loop_end = gen_label_rtx ();
4397
4398		  unsignedp = TREE_UNSIGNED (domain);
4399
4400		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4401
4402		  DECL_RTL (index) = index_r
4403		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4404						 &unsignedp, 0));
4405
4406		  if (TREE_CODE (value) == SAVE_EXPR
4407		      && SAVE_EXPR_RTL (value) == 0)
4408		    {
4409		      /* Make sure value gets expanded once before the
4410                         loop.  */
4411		      expand_expr (value, const0_rtx, VOIDmode, 0);
4412		      emit_queue ();
4413		    }
4414		  store_expr (lo_index, index_r, 0);
4415		  loop = expand_start_loop (0);
4416
4417		  /* Assign value to element index.  */
4418		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4419					 size_int (BITS_PER_UNIT));
4420		  position = size_binop (MULT_EXPR,
4421					 size_binop (MINUS_EXPR, index,
4422						     TYPE_MIN_VALUE (domain)),
4423					 position);
4424		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4425		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4426		  xtarget = change_address (target, mode, addr);
4427		  if (TREE_CODE (value) == CONSTRUCTOR)
4428		    store_constructor (value, xtarget, cleared);
4429		  else
4430		    store_expr (value, xtarget, 0);
4431
4432		  expand_exit_loop_if_false (loop,
4433					     build (LT_EXPR, integer_type_node,
4434						    index, hi_index));
4435
4436		  expand_increment (build (PREINCREMENT_EXPR,
4437					   TREE_TYPE (index),
4438					   index, integer_one_node), 0, 0);
4439		  expand_end_loop ();
4440		  emit_label (loop_end);
4441
4442		  /* Needed by stupid register allocation, to extend the
4443		     lifetime of pseudo-regs used by TARGET past the end
4444		     of the loop.  */
4445		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
4446		}
4447	    }
4448	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4449	      || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4450	    {
4451	      rtx pos_rtx, addr;
4452	      tree position;
4453
4454	      if (index == 0)
4455		index = size_int (i);
4456
4457	      if (minelt)
4458		index = size_binop (MINUS_EXPR, index,
4459				    TYPE_MIN_VALUE (domain));
4460	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4461				     size_int (BITS_PER_UNIT));
4462	      position = size_binop (MULT_EXPR, index, position);
4463	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4464	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4465	      xtarget = change_address (target, mode, addr);
4466	      store_expr (value, xtarget, 0);
4467	    }
4468	  else
4469	    {
4470	      if (index != 0)
4471		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4472			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4473	      else
4474		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4475	      store_constructor_field (target, bitsize, bitpos,
4476				       mode, value, type, cleared);
4477	    }
4478	}
4479    }
4480  /* Set constructor assignments.  */
4481  else if (TREE_CODE (type) == SET_TYPE)
4482    {
4483      tree elt = CONSTRUCTOR_ELTS (exp);
4484      int nbytes = int_size_in_bytes (type), nbits;
4485      tree domain = TYPE_DOMAIN (type);
4486      tree domain_min, domain_max, bitlength;
4487
4488      /* The default implementation strategy is to extract the constant
4489	 parts of the constructor, use that to initialize the target,
4490	 and then "or" in whatever non-constant ranges we need in addition.
4491
4492	 If a large set is all zero or all ones, it is
4493	 probably better to set it using memset (if available) or bzero.
4494	 Also, if a large set has just a single range, it may be
4495	 better to first clear the whole set (using bzero/memset)
4496	 and then set the bits we want.  */
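      /* For example, when a constructor denotes the members { 1, 3 }
	 plus a range with run-time bounds LO .. HI, the constant bits
	 for 1 and 3 are assembled into words below, while the range is
	 filled in afterwards via __setbits (or memset, when its bounds
	 turn out to be byte-aligned constants).  */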
4497
4498      /* Check for all zeros.  */
4499      if (elt == NULL_TREE)
4500	{
4501	  if (!cleared)
4502	    clear_storage (target, expr_size (exp),
4503			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4504	  return;
4505	}
4506
4507      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4508      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4509      bitlength = size_binop (PLUS_EXPR,
4510			      size_binop (MINUS_EXPR, domain_max, domain_min),
4511			      size_one_node);
4512
4513      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4514	abort ();
4515      nbits = TREE_INT_CST_LOW (bitlength);
4516
4517      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4518	 are "complicated" (more than one range), initialize (the
4519	 constant parts) by copying from a constant.  */
4520      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4521	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4522	{
4523	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4524	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4525	  char *bit_buffer = (char *) alloca (nbits);
4526	  HOST_WIDE_INT word = 0;
4527	  int bit_pos = 0;
4528	  int ibit = 0;
4529	  int offset = 0;  /* In bytes from beginning of set.  */
4530	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4531	  for (;;)
4532	    {
4533	      if (bit_buffer[ibit])
4534		{
4535		  if (BYTES_BIG_ENDIAN)
4536		    word |= (1 << (set_word_size - 1 - bit_pos));
4537		  else
4538		    word |= 1 << bit_pos;
4539		}
4540	      bit_pos++;  ibit++;
4541	      if (bit_pos >= set_word_size || ibit == nbits)
4542		{
4543		  if (word != 0 || ! cleared)
4544		    {
4545		      rtx datum = GEN_INT (word);
4546		      rtx to_rtx;
4547		      /* The assumption here is that it is safe to use
4548			 XEXP if the set is multi-word, but not if
4549			 it's single-word.  */
4550		      if (GET_CODE (target) == MEM)
4551			{
4552			  to_rtx = plus_constant (XEXP (target, 0), offset);
4553			  to_rtx = change_address (target, mode, to_rtx);
4554			}
4555		      else if (offset == 0)
4556			to_rtx = target;
4557		      else
4558			abort ();
4559		      emit_move_insn (to_rtx, datum);
4560		    }
4561		  if (ibit == nbits)
4562		    break;
4563		  word = 0;
4564		  bit_pos = 0;
4565		  offset += set_word_size / BITS_PER_UNIT;
4566		}
4567	    }
4568	}
4569      else if (!cleared)
4570	{
4571	  /* Don't bother clearing storage if the set is all ones.  */
4572	  if (TREE_CHAIN (elt) != NULL_TREE
4573	      || (TREE_PURPOSE (elt) == NULL_TREE
4574		  ? nbits != 1
4575		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4576		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4577		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4578			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4579			 != nbits))))
4580	    clear_storage (target, expr_size (exp),
4581			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4582	}
4583
4584      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4585	{
4586	  /* start of range of element or NULL */
4587	  tree startbit = TREE_PURPOSE (elt);
4588	  /* end of range of element, or element value */
4589	  tree endbit   = TREE_VALUE (elt);
4590#ifdef TARGET_MEM_FUNCTIONS
4591	  HOST_WIDE_INT startb, endb;
4592#endif
4593	  rtx  bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4594
4595	  bitlength_rtx = expand_expr (bitlength,
4596			    NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4597
4598	  /* handle non-range tuple element like [ expr ]  */
4599	  if (startbit == NULL_TREE)
4600	    {
4601	      startbit = save_expr (endbit);
4602	      endbit = startbit;
4603	    }
4604	  startbit = convert (sizetype, startbit);
4605	  endbit = convert (sizetype, endbit);
4606	  if (! integer_zerop (domain_min))
4607	    {
4608	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4609	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4610	    }
4611	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4612				      EXPAND_CONST_ADDRESS);
4613	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4614				    EXPAND_CONST_ADDRESS);
4615
4616	  if (REG_P (target))
4617	    {
4618	      targetx = assign_stack_temp (GET_MODE (target),
4619					   GET_MODE_SIZE (GET_MODE (target)),
4620					   0);
4621	      emit_move_insn (targetx, target);
4622	    }
4623	  else if (GET_CODE (target) == MEM)
4624	    targetx = target;
4625	  else
4626	    abort ();
4627
4628#ifdef TARGET_MEM_FUNCTIONS
4629	  /* Optimization:  If startbit and endbit are
4630	     constants divisible by BITS_PER_UNIT,
4631	     call memset instead.  */
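	  /* E.g. STARTBIT 8 and ENDBIT 23 give startb = 8 and endb = 24;
	     with 8-bit bytes both are byte-aligned, so the two bytes
	     covering bits 8 through 23 are set to all-ones with a single
	     memset call.  */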
4632	  if (TREE_CODE (startbit) == INTEGER_CST
4633	      && TREE_CODE (endbit) == INTEGER_CST
4634	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4635	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4636	    {
4637	      emit_library_call (memset_libfunc, 0,
4638				 VOIDmode, 3,
4639				 plus_constant (XEXP (targetx, 0),
4640						startb / BITS_PER_UNIT),
4641				 Pmode,
4642				 constm1_rtx, TYPE_MODE (integer_type_node),
4643				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4644				 TYPE_MODE (sizetype));
4645	    }
4646	  else
4647#endif
4648	    {
4649	      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4650				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4651				 bitlength_rtx, TYPE_MODE (sizetype),
4652				 startbit_rtx, TYPE_MODE (sizetype),
4653				 endbit_rtx, TYPE_MODE (sizetype));
4654	    }
4655	  if (REG_P (target))
4656	    emit_move_insn (target, targetx);
4657	}
4658    }
4659
4660  else
4661    abort ();
4662}
4663
4664/* Store the value of EXP (an expression tree)
4665   into a subfield of TARGET which has mode MODE and occupies
4666   BITSIZE bits, starting BITPOS bits from the start of TARGET.
4667   If MODE is VOIDmode, it means that we are storing into a bit-field.
4668
4669   If VALUE_MODE is VOIDmode, return nothing in particular.
4670   UNSIGNEDP is not used in this case.
4671
4672   Otherwise, return an rtx for the value stored.  This rtx
4673   has mode VALUE_MODE if that is convenient to do.
4674   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4675
4676   ALIGN is the alignment that TARGET is known to have, measured in bytes.
4677   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4678
4679   ALIAS_SET is the alias set for the destination.  This value will
4680   (in general) be different from that for TARGET, since TARGET is a
4681   reference to the containing structure.  */
4682
4683static rtx
4684store_field (target, bitsize, bitpos, mode, exp, value_mode,
4685	     unsignedp, align, total_size, alias_set)
4686     rtx target;
4687     int bitsize, bitpos;
4688     enum machine_mode mode;
4689     tree exp;
4690     enum machine_mode value_mode;
4691     int unsignedp;
4692     int align;
4693     int total_size;
4694     int alias_set;
4695{
4696  HOST_WIDE_INT width_mask = 0;
4697
4698  if (TREE_CODE (exp) == ERROR_MARK)
4699    return const0_rtx;
4700
4701  if (bitsize < HOST_BITS_PER_WIDE_INT)
4702    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4703
4704  /* If we are storing into an unaligned field of an aligned union that is
4705     in a register, we may have the mode of TARGET being an integer mode but
4706     MODE == BLKmode.  In that case, get an aligned object whose size and
4707     alignment are the same as TARGET and store TARGET into it (we can avoid
4708     the store if the field being stored is the entire width of TARGET).  Then
4709     call ourselves recursively to store the field into a BLKmode version of
4710     that object.  Finally, load from the object into TARGET.  This is not
4711     very efficient in general, but should only be slightly more expensive
4712     than the otherwise-required unaligned accesses.  Perhaps this can be
4713     cleaned up later.  */
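  /* For instance, when storing into a BLKmode field of a union held in
     an SImode register: the register is spilled to a stack temporary,
     the field is stored into a BLKmode view of that temporary, and the
     result is copied back into the register.  */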
4714
4715  if (mode == BLKmode
4716      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4717    {
4718      rtx object = assign_stack_temp (GET_MODE (target),
4719				      GET_MODE_SIZE (GET_MODE (target)), 0);
4720      rtx blk_object = copy_rtx (object);
4721
4722      MEM_SET_IN_STRUCT_P (object, 1);
4723      MEM_SET_IN_STRUCT_P (blk_object, 1);
4724      PUT_MODE (blk_object, BLKmode);
4725
4726      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4727	emit_move_insn (object, target);
4728
4729      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4730		   align, total_size, alias_set);
4731
4732      /* Even though we aren't returning target, we need to
4733	 give it the updated value.  */
4734      emit_move_insn (target, object);
4735
4736      return blk_object;
4737    }
4738
4739  /* If the structure is in a register or if the component
4740     is a bit field, we cannot use addressing to access it.
4741     Use bit-field techniques or SUBREG to store in it.  */
4742
4743  if (mode == VOIDmode
4744      || (mode != BLKmode && ! direct_store[(int) mode]
4745	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4746	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4747      || GET_CODE (target) == REG
4748      || GET_CODE (target) == SUBREG
4749      /* If the field isn't aligned enough to store as an ordinary memref,
4750	 store it as a bit field.  */
4751      || (SLOW_UNALIGNED_ACCESS
4752	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4753      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4754    {
4755      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4756
4757      /* If BITSIZE is narrower than the size of the type of EXP
4758	 we will be narrowing TEMP.  Normally, what's wanted are the
4759	 low-order bits.  However, if EXP's type is a record and this is
4760	 a big-endian machine, we want the upper BITSIZE bits.  */
4761      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4762	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4763	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4764	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4765			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4766				       - bitsize),
4767			     temp, 1);
4768
4769      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4770	 MODE.  */
4771      if (mode != VOIDmode && mode != BLKmode
4772	  && mode != TYPE_MODE (TREE_TYPE (exp)))
4773	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4774
4775      /* If the modes of TARGET and TEMP are both BLKmode, both
4776	 must be in memory and BITPOS must be aligned on a byte
4777	 boundary.  If so, we simply do a block copy.  */
4778      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4779	{
4780	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4781	      || bitpos % BITS_PER_UNIT != 0)
4782	    abort ();
4783
4784	  target = change_address (target, VOIDmode,
4785				   plus_constant (XEXP (target, 0),
4786						bitpos / BITS_PER_UNIT));
4787
4788	  emit_block_move (target, temp,
4789			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4790				    / BITS_PER_UNIT),
4791			   1);
4792
4793	  return value_mode == VOIDmode ? const0_rtx : target;
4794	}
4795
4796      /* Store the value in the bitfield.  */
4797      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4798      if (value_mode != VOIDmode)
4799	{
4800	  /* The caller wants an rtx for the value.  */
4801	  /* If possible, avoid refetching from the bitfield itself.  */
4802	  if (width_mask != 0
4803	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4804	    {
4805	      tree count;
4806	      enum machine_mode tmode;
4807
4808	      if (unsignedp)
4809		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4810	      tmode = GET_MODE (temp);
4811	      if (tmode == VOIDmode)
4812		tmode = value_mode;
4813	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4814	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4815	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4816	    }
4817	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
4818				    NULL_RTX, value_mode, 0, align,
4819				    total_size);
4820	}
4821      return const0_rtx;
4822    }
4823  else
4824    {
4825      rtx addr = XEXP (target, 0);
4826      rtx to_rtx;
4827
4828      /* If a value is wanted, it must be the lhs;
4829	 so make the address stable for multiple use.  */
4830
4831      if (value_mode != VOIDmode && GET_CODE (addr) != REG
4832	  && ! CONSTANT_ADDRESS_P (addr)
4833	  /* A frame-pointer reference is already stable.  */
4834	  && ! (GET_CODE (addr) == PLUS
4835		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
4836		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
4837		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4838	addr = copy_to_reg (addr);
4839
4840      /* Now build a reference to just the desired component.  */
4841
4842      to_rtx = copy_rtx (change_address (target, mode,
4843					 plus_constant (addr,
4844							(bitpos
4845							 / BITS_PER_UNIT))));
4846      MEM_SET_IN_STRUCT_P (to_rtx, 1);
4847      MEM_ALIAS_SET (to_rtx) = alias_set;
4848
4849      return store_expr (exp, to_rtx, value_mode != VOIDmode);
4850    }
4851}
4852
4853/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4854   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4855   ARRAY_REFs and find the ultimate containing object, which we return.
4856
4857   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4858   bit position, and *PUNSIGNEDP to the signedness of the field.
4859   If the position of the field is variable, we store a tree
4860   giving the variable offset (in units) in *POFFSET.
4861   This offset is in addition to the bit position.
4862   If the position is not variable, we store 0 in *POFFSET.
4863   We set *PALIGNMENT to the alignment in bytes of the address that will be
4864   computed.  This is the alignment of the thing we return if *POFFSET
4865   is zero, but can be less strictly aligned if *POFFSET is nonzero.
4866
4867   If any of the extraction expressions is volatile,
4868   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
4869
4870   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
4871   is a mode that can be used to access the field.  In that case, *PBITSIZE
4872   is redundant.
4873
4874   If the field describes a variable-sized object, *PMODE is set to
4875   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
4876   this case, but the address of the object can be found.   */
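
/* (A hedged example of this interface, not from the sources: for the C
   reference `s.a[i].f', where F is a 5-bit bit-field, we return the tree
   for S, set *PBITSIZE to 5 and *PMODE to VOIDmode, accumulate the
   constant part of the position into *PBITPOS, and, since the array
   index is variable, leave a tree for the index times the element size
   (in units) in *POFFSET.)  */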
4877
4878tree
4879get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4880		     punsignedp, pvolatilep, palignment)
4881     tree exp;
4882     int *pbitsize;
4883     int *pbitpos;
4884     tree *poffset;
4885     enum machine_mode *pmode;
4886     int *punsignedp;
4887     int *pvolatilep;
4888     int *palignment;
4889{
4890  tree orig_exp = exp;
4891  tree size_tree = 0;
4892  enum machine_mode mode = VOIDmode;
4893  tree offset = integer_zero_node;
4894  unsigned int alignment = BIGGEST_ALIGNMENT;
4895
4896  if (TREE_CODE (exp) == COMPONENT_REF)
4897    {
4898      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4899      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4900	mode = DECL_MODE (TREE_OPERAND (exp, 1));
4901      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4902    }
4903  else if (TREE_CODE (exp) == BIT_FIELD_REF)
4904    {
4905      size_tree = TREE_OPERAND (exp, 1);
4906      *punsignedp = TREE_UNSIGNED (exp);
4907    }
4908  else
4909    {
4910      mode = TYPE_MODE (TREE_TYPE (exp));
4911      if (mode == BLKmode)
4912	size_tree = TYPE_SIZE (TREE_TYPE (exp));
4913
4914      *pbitsize = GET_MODE_BITSIZE (mode);
4915      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4916    }
4917
4918  if (size_tree)
4919    {
4920      if (TREE_CODE (size_tree) != INTEGER_CST)
4921	mode = BLKmode, *pbitsize = -1;
4922      else
4923	*pbitsize = TREE_INT_CST_LOW (size_tree);
4924    }
4925
4926  /* Compute cumulative bit-offset for nested component-refs and array-refs,
4927     and find the ultimate containing object.  */
4928
4929  *pbitpos = 0;
4930
4931  while (1)
4932    {
4933      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4934	{
4935	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
4936		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4937		      : TREE_OPERAND (exp, 2));
4938	  tree constant = integer_zero_node, var = pos;
4939
4940	  /* If this field hasn't been filled in yet, don't go
4941	     past it.  This should only happen when folding expressions
4942	     made during type construction.  */
4943	  if (pos == 0)
4944	    break;
4945
4946	  /* Assume here that the offset is a multiple of a unit.
4947	     If not, there should be an explicitly added constant.  */
4948	  if (TREE_CODE (pos) == PLUS_EXPR
4949	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4950	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4951	  else if (TREE_CODE (pos) == INTEGER_CST)
4952	    constant = pos, var = integer_zero_node;
4953
4954	  *pbitpos += TREE_INT_CST_LOW (constant);
4955	  offset = size_binop (PLUS_EXPR, offset,
4956			       size_binop (EXACT_DIV_EXPR, var,
4957					   size_int (BITS_PER_UNIT)));
4958	}
4959
4960      else if (TREE_CODE (exp) == ARRAY_REF)
4961	{
4962	  /* This code is based on the code in case ARRAY_REF in expand_expr
4963	     below.  We assume here that the size of an array element is
4964	     always an integral multiple of BITS_PER_UNIT.  */
4965
4966	  tree index = TREE_OPERAND (exp, 1);
4967	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4968	  tree low_bound
4969	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4970	  tree index_type = TREE_TYPE (index);
4971	  tree xindex;
4972
4973	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4974	    {
4975	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4976			       index);
4977	      index_type = TREE_TYPE (index);
4978	    }
4979
4980	  /* Optimize the special-case of a zero lower bound.
4981
4982	     We convert the low_bound to sizetype to avoid some problems
4983	     with constant folding.  (E.g. suppose the lower bound is 1,
4984	     and its mode is QI.  Without the conversion,  (ARRAY
4985	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4986	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)
4987
4988	     But sizetype isn't quite right either (especially if
4989	     the lowbound is negative).  FIXME */
4990
4991	  if (! integer_zerop (low_bound))
4992	    index = fold (build (MINUS_EXPR, index_type, index,
4993				 convert (sizetype, low_bound)));
4994
4995	  if (TREE_CODE (index) == INTEGER_CST)
4996	    {
4997	      index = convert (sbitsizetype, index);
4998	      index_type = TREE_TYPE (index);
4999	    }
5000
5001	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5002			        convert (sbitsizetype,
5003					 TYPE_SIZE (TREE_TYPE (exp)))));
5004
5005	  if (TREE_CODE (xindex) == INTEGER_CST
5006	      && TREE_INT_CST_HIGH (xindex) == 0)
5007	    *pbitpos += TREE_INT_CST_LOW (xindex);
5008	  else
5009	    {
5010	      /* Either the bit offset calculated above is not constant, or
5011		 it overflowed.  In either case, redo the multiplication
5012		 against the size in units.  This is especially important
5013		 in the non-constant case to avoid a division at runtime.  */
5014	      xindex = fold (build (MULT_EXPR, ssizetype, index,
5015                                    convert (ssizetype,
5016                                         TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5017
5018	      if (contains_placeholder_p (xindex))
5019		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5020
5021	      offset = size_binop (PLUS_EXPR, offset, xindex);
5022	    }
5023	}
5024      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5025	       && ! ((TREE_CODE (exp) == NOP_EXPR
5026		      || TREE_CODE (exp) == CONVERT_EXPR)
5027		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
5028			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5029			       != UNION_TYPE))
5030		     && (TYPE_MODE (TREE_TYPE (exp))
5031			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5032	break;
5033
5034      /* If any reference in the chain is volatile, the effect is volatile.  */
5035      if (TREE_THIS_VOLATILE (exp))
5036	*pvolatilep = 1;
5037
5038      /* If the offset is non-constant already, then we can't assume any
5039	 alignment more than the alignment here.  */
5040      if (! integer_zerop (offset))
5041	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5042
5043      exp = TREE_OPERAND (exp, 0);
5044    }
5045
5046  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5047    alignment = MIN (alignment, DECL_ALIGN (exp));
5048  else if (TREE_TYPE (exp) != 0)
5049    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5050
5051  if (integer_zerop (offset))
5052    offset = 0;
5053
5054  if (offset != 0 && contains_placeholder_p (offset))
5055    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5056
5057  *pmode = mode;
5058  *poffset = offset;
5059  *palignment = alignment / BITS_PER_UNIT;
5060  return exp;
5061}
5062
5063/* Subroutine of expand_expr: compute memory_usage from modifier.  */
5064static enum memory_use_mode
5065get_memory_usage_from_modifier (modifier)
5066     enum expand_modifier modifier;
5067{
5068  switch (modifier)
5069    {
5070    case EXPAND_NORMAL:
5071    case EXPAND_SUM:
5072      return MEMORY_USE_RO;
5073      break;
5074    case EXPAND_MEMORY_USE_WO:
5075      return MEMORY_USE_WO;
5076      break;
5077    case EXPAND_MEMORY_USE_RW:
5078      return MEMORY_USE_RW;
5079      break;
5080    case EXPAND_MEMORY_USE_DONT:
5081      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5082	 MEMORY_USE_DONT, because they are modifiers to a call of
5083	 expand_expr in the ADDR_EXPR case of expand_expr.  */
5084    case EXPAND_CONST_ADDRESS:
5085    case EXPAND_INITIALIZER:
5086      return MEMORY_USE_DONT;
5087    case EXPAND_MEMORY_USE_BAD:
5088    default:
5089      abort ();
5090    }
5091}
5092
5093/* Given an rtx VALUE that may contain additions and multiplications,
5094   return an equivalent value that just refers to a register or memory.
5095   This is done by generating instructions to perform the arithmetic
5096   and returning a pseudo-register containing the value.
5097
5098   The returned value may be a REG, SUBREG, MEM or constant.  */
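
/* (Illustrative use, with made-up register numbers: given
	(plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))
   force_operand emits the multiply and the add and returns a pseudo
   register holding the sum, so the caller sees a plain register.)  */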
5099
5100rtx
5101force_operand (value, target)
5102     rtx value, target;
5103{
5104  register optab binoptab = 0;
5105  /* Use a temporary to force order of execution of calls to
5106     `force_operand'.  */
5107  rtx tmp;
5108  register rtx op2;
5109  /* Use subtarget as the target for operand 0 of a binary operation.  */
5110  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5111
5112  /* Check for a PIC address load.  */
5113  if (flag_pic
5114      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5115      && XEXP (value, 0) == pic_offset_table_rtx
5116      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5117	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5118	  || GET_CODE (XEXP (value, 1)) == CONST))
5119    {
5120      if (!subtarget)
5121	subtarget = gen_reg_rtx (GET_MODE (value));
5122      emit_move_insn (subtarget, value);
5123      return subtarget;
5124    }
5125
5126  if (GET_CODE (value) == PLUS)
5127    binoptab = add_optab;
5128  else if (GET_CODE (value) == MINUS)
5129    binoptab = sub_optab;
5130  else if (GET_CODE (value) == MULT)
5131    {
5132      op2 = XEXP (value, 1);
5133      if (!CONSTANT_P (op2)
5134	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5135	subtarget = 0;
5136      tmp = force_operand (XEXP (value, 0), subtarget);
5137      return expand_mult (GET_MODE (value), tmp,
5138			  force_operand (op2, NULL_RTX),
5139			  target, 0);
5140    }
5141
5142  if (binoptab)
5143    {
5144      op2 = XEXP (value, 1);
5145      if (!CONSTANT_P (op2)
5146	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5147	subtarget = 0;
5148      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5149	{
5150	  binoptab = add_optab;
5151	  op2 = negate_rtx (GET_MODE (value), op2);
5152	}
5153
5154      /* Check for an addition with OP2 a constant integer and our first
5155	 operand a PLUS of a virtual register and something else.  In that
5156	 case, we want to emit the sum of the virtual register and the
5157	 constant first and then add the other value.  This allows virtual
5158	 register instantiation to simply modify the constant rather than
5159	 creating another one around this addition.  */
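      /* (E.g., illustratively: for (plus (plus (reg virtual-stack-vars)
	 (reg 102)) (const_int 8)), we first form virtual-stack-vars + 8,
	 which instantiation can rewrite as a frame-pointer offset, and
	 only then add (reg 102).)  */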
5160      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5161	  && GET_CODE (XEXP (value, 0)) == PLUS
5162	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5163	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5164	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5165	{
5166	  rtx temp = expand_binop (GET_MODE (value), binoptab,
5167				   XEXP (XEXP (value, 0), 0), op2,
5168				   subtarget, 0, OPTAB_LIB_WIDEN);
5169	  return expand_binop (GET_MODE (value), binoptab, temp,
5170			       force_operand (XEXP (XEXP (value, 0), 1), 0),
5171			       target, 0, OPTAB_LIB_WIDEN);
5172	}
5173
5174      tmp = force_operand (XEXP (value, 0), subtarget);
5175      return expand_binop (GET_MODE (value), binoptab, tmp,
5176			   force_operand (op2, NULL_RTX),
5177			   target, 0, OPTAB_LIB_WIDEN);
5178      /* We give UNSIGNEDP = 0 to expand_binop
5179	 because the only operations we are expanding here are signed ones.  */
5180    }
5181  return value;
5182}
5183
5184/* Subroutine of expand_expr:
5185   save the non-copied parts (LIST) of an expr (LHS), and return a list
5186   which can restore these values to their previous values,
5187   should something modify their storage.  */
5188
5189static tree
5190save_noncopied_parts (lhs, list)
5191     tree lhs;
5192     tree list;
5193{
5194  tree tail;
5195  tree parts = 0;
5196
5197  for (tail = list; tail; tail = TREE_CHAIN (tail))
5198    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5199      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5200    else
5201      {
5202	tree part = TREE_VALUE (tail);
5203	tree part_type = TREE_TYPE (part);
5204	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5205	rtx target = assign_temp (part_type, 0, 1, 1);
5206	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5207	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5208	parts = tree_cons (to_be_saved,
5209			   build (RTL_EXPR, part_type, NULL_TREE,
5210				  (tree) target),
5211			   parts);
5212	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5213      }
5214  return parts;
5215}
5216
5217/* Subroutine of expand_expr:
5218   record the non-copied parts (LIST) of an expr (LHS), and return a list
5219   which specifies the initial values of these parts.  */
5220
5221static tree
5222init_noncopied_parts (lhs, list)
5223     tree lhs;
5224     tree list;
5225{
5226  tree tail;
5227  tree parts = 0;
5228
5229  for (tail = list; tail; tail = TREE_CHAIN (tail))
5230    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5231      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5232    else if (TREE_PURPOSE (tail))
5233      {
5234	tree part = TREE_VALUE (tail);
5235	tree part_type = TREE_TYPE (part);
5236	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5237	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5238      }
5239  return parts;
5240}
5241
5242/* Subroutine of expand_expr: return nonzero iff there is no way that
5243   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5244   call is going to be used to determine whether we need a temporary
5245   for EXP, as opposed to a recursive call to this function.
5246
5247   It is always safe for this routine to return zero since it merely
5248   searches for optimization opportunities.  */
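
/* (A hedged illustration: when expanding `x = x + f ()', the expander
   asks whether the rtx for X is safe from the tree `x + f ()'.  Since
   the CALL_EXPR case below assumes a call can clobber all of memory,
   we return 0 when X is a MEM, and a temporary is used instead.)  */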
5249
5250static int
5251safe_from_p (x, exp, top_p)
5252     rtx x;
5253     tree exp;
5254     int top_p;
5255{
5256  rtx exp_rtl = 0;
5257  int i, nops;
5258  static int save_expr_count;
5259  static int save_expr_size = 0;
5260  static tree *save_expr_rewritten;
5261  static tree save_expr_trees[256];
5262
5263  if (x == 0
5264      /* If EXP has varying size, we MUST use a target since we currently
5265	 have no way of allocating temporaries of variable size
5266	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5267	 So we assume here that something at a higher level has prevented a
5268	 clash.  This is somewhat bogus, but the best we can do.  Only
5269	 do this when X is BLKmode and when we are at the top level.  */
5270      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5271	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5272	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5273	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5274	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5275	      != INTEGER_CST)
5276	  && GET_MODE (x) == BLKmode))
5277    return 1;
5278
5279  if (top_p && save_expr_size == 0)
5280    {
5281      int rtn;
5282
5283      save_expr_count = 0;
5284      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5285      save_expr_rewritten = &save_expr_trees[0];
5286
5287      rtn = safe_from_p (x, exp, 1);
5288
5289      for (i = 0; i < save_expr_count; ++i)
5290	{
5291	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5292	    abort ();
5293	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5294	}
5295
5296      save_expr_size = 0;
5297
5298      return rtn;
5299    }
5300
5301  /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5302     find the underlying pseudo.  */
5303  if (GET_CODE (x) == SUBREG)
5304    {
5305      x = SUBREG_REG (x);
5306      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5307	return 0;
5308    }
5309
5310  /* If X is a location in the outgoing argument area, it is always safe.  */
5311  if (GET_CODE (x) == MEM
5312      && (XEXP (x, 0) == virtual_outgoing_args_rtx
5313	  || (GET_CODE (XEXP (x, 0)) == PLUS
5314	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5315    return 1;
5316
5317  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5318    {
5319    case 'd':
5320      exp_rtl = DECL_RTL (exp);
5321      break;
5322
5323    case 'c':
5324      return 1;
5325
5326    case 'x':
5327      if (TREE_CODE (exp) == TREE_LIST)
5328	return ((TREE_VALUE (exp) == 0
5329		 || safe_from_p (x, TREE_VALUE (exp), 0))
5330		&& (TREE_CHAIN (exp) == 0
5331		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5332      else if (TREE_CODE (exp) == ERROR_MARK)
5333	return 1;	/* An already-visited SAVE_EXPR? */
5334      else
5335	return 0;
5336
5337    case '1':
5338      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5339
5340    case '2':
5341    case '<':
5342      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5343	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5344
5345    case 'e':
5346    case 'r':
5347      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5348	 the expression.  If it is set, we conflict iff we are that rtx or
5349	 both are in memory.  Otherwise, we check all operands of the
5350	 expression recursively.  */
5351
5352      switch (TREE_CODE (exp))
5353	{
5354	case ADDR_EXPR:
5355	  return (staticp (TREE_OPERAND (exp, 0))
5356		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5357		  || TREE_STATIC (exp));
5358
5359	case INDIRECT_REF:
5360	  if (GET_CODE (x) == MEM)
5361	    return 0;
5362	  break;
5363
5364	case CALL_EXPR:
5365	  exp_rtl = CALL_EXPR_RTL (exp);
5366	  if (exp_rtl == 0)
5367	    {
5368	      /* Assume that the call will clobber all hard registers and
5369		 all of memory.  */
5370	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5371		  || GET_CODE (x) == MEM)
5372		return 0;
5373	    }
5374
5375	  break;
5376
5377	case RTL_EXPR:
5378	  /* If a sequence exists, we would have to scan every instruction
5379	     in the sequence to see if it was safe.  This is probably not
5380	     worthwhile.  */
5381	  if (RTL_EXPR_SEQUENCE (exp))
5382	    return 0;
5383
5384	  exp_rtl = RTL_EXPR_RTL (exp);
5385	  break;
5386
5387	case WITH_CLEANUP_EXPR:
5388	  exp_rtl = RTL_EXPR_RTL (exp);
5389	  break;
5390
5391	case CLEANUP_POINT_EXPR:
5392	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5393
5394	case SAVE_EXPR:
5395	  exp_rtl = SAVE_EXPR_RTL (exp);
5396	  if (exp_rtl)
5397	    break;
5398
5399	  /* This SAVE_EXPR might appear many times in the top-level
5400	     safe_from_p() expression, and if it has a complex
5401	     subexpression, examining it multiple times could result
5402	     in a combinatorial explosion.  E.g. on an Alpha
5403	     running at least 200MHz, a Fortran test case compiled with
5404	     optimization took about 28 minutes to compile -- even though
5405	     it was only a few lines long, and the complicated line causing
5406	     so much time to be spent in the earlier version of safe_from_p()
5407	     had only 293 or so unique nodes.
5408
5409	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5410	     where it is so we can turn it back in the top-level safe_from_p()
5411	     when we're done.  */
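
	  /* (Illustrative note: the blow-up comes from sharing.  A single
	     SAVE_EXPR node referenced N times in one expression is walked
	     once per reference, so nested shared SAVE_EXPRs make a naive
	     walk exponential; marking visited nodes keeps it linear.)  */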
5412
5413	  /* For now, don't bother re-sizing the array. */
5414	  if (save_expr_count >= save_expr_size)
5415	    return 0;
5416	  save_expr_rewritten[save_expr_count++] = exp;
5417
5418	  nops = tree_code_length[(int) SAVE_EXPR];
5419	  for (i = 0; i < nops; i++)
5420	    {
5421	      tree operand = TREE_OPERAND (exp, i);
5422	      if (operand == NULL_TREE)
5423		continue;
5424	      TREE_SET_CODE (exp, ERROR_MARK);
5425	      if (!safe_from_p (x, operand, 0))
5426		return 0;
5427	      TREE_SET_CODE (exp, SAVE_EXPR);
5428	    }
5429	  TREE_SET_CODE (exp, ERROR_MARK);
5430	  return 1;
5431
5432	case BIND_EXPR:
5433	  /* The only operand we look at is operand 1.  The rest aren't
5434	     part of the expression.  */
5435	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5436
5437	case METHOD_CALL_EXPR:
5438	  /* This takes an rtx argument, but shouldn't appear here.  */
5439	  abort ();
5440
5441	default:
5442	  break;
5443	}
5444
5445      /* If we have an rtx, we do not need to scan our operands.  */
5446      if (exp_rtl)
5447	break;
5448
5449      nops = tree_code_length[(int) TREE_CODE (exp)];
5450      for (i = 0; i < nops; i++)
5451	if (TREE_OPERAND (exp, i) != 0
5452	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5453	  return 0;
5454    }
5455
5456  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5457     with it.  */
5458  if (exp_rtl)
5459    {
5460      if (GET_CODE (exp_rtl) == SUBREG)
5461	{
5462	  exp_rtl = SUBREG_REG (exp_rtl);
5463	  if (GET_CODE (exp_rtl) == REG
5464	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5465	    return 0;
5466	}
5467
5468      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5469	 are memory and EXP is not readonly.  */
5470      return ! (rtx_equal_p (x, exp_rtl)
5471		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5472		    && ! TREE_READONLY (exp)));
5473    }
5474
5475  /* If we reach here, it is safe.  */
5476  return 1;
5477}
5478
5479/* Subroutine of expand_expr: return nonzero iff EXP is an
5480   expression whose type is statically determinable.  */
5481
5482static int
5483fixed_type_p (exp)
5484     tree exp;
5485{
5486  if (TREE_CODE (exp) == PARM_DECL
5487      || TREE_CODE (exp) == VAR_DECL
5488      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5489      || TREE_CODE (exp) == COMPONENT_REF
5490      || TREE_CODE (exp) == ARRAY_REF)
5491    return 1;
5492  return 0;
5493}
5494
5495/* Subroutine of expand_expr: return rtx if EXP is a
5496   variable or parameter; else return 0.  */
5497
5498static rtx
5499var_rtx (exp)
5500     tree exp;
5501{
5502  STRIP_NOPS (exp);
5503  switch (TREE_CODE (exp))
5504    {
5505    case PARM_DECL:
5506    case VAR_DECL:
5507      return DECL_RTL (exp);
5508    default:
5509      return 0;
5510    }
5511}
5512
5513#ifdef MAX_INTEGER_COMPUTATION_MODE
5514void
5515check_max_integer_computation_mode (exp)
5516    tree exp;
5517{
5518  enum tree_code code = TREE_CODE (exp);
5519  enum machine_mode mode;
5520
5521  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5522  if (code == NOP_EXPR
5523      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5524    return;
5525
5526  /* First check the type of the overall operation.   We need only look at
5527     unary, binary and relational operations.  */
5528  if (TREE_CODE_CLASS (code) == '1'
5529      || TREE_CODE_CLASS (code) == '2'
5530      || TREE_CODE_CLASS (code) == '<')
5531    {
5532      mode = TYPE_MODE (TREE_TYPE (exp));
5533      if (GET_MODE_CLASS (mode) == MODE_INT
5534	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5535	fatal ("unsupported wide integer operation");
5536    }
5537
5538  /* Check operand of a unary op.  */
5539  if (TREE_CODE_CLASS (code) == '1')
5540    {
5541      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5542      if (GET_MODE_CLASS (mode) == MODE_INT
5543	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5544	fatal ("unsupported wide integer operation");
5545    }
5546
5547  /* Check operands of a binary/comparison op.  */
5548  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5549    {
5550      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5551      if (GET_MODE_CLASS (mode) == MODE_INT
5552	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5553	fatal ("unsupported wide integer operation");
5554
5555      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5556      if (GET_MODE_CLASS (mode) == MODE_INT
5557	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5558	fatal ("unsupported wide integer operation");
5559    }
5560}
5561#endif
5562
5563
5564/* expand_expr: generate code for computing expression EXP.
5565   An rtx for the computed value is returned.  The value is never null.
5566   In the case of a void EXP, const0_rtx is returned.
5567
5568   The value may be stored in TARGET if TARGET is nonzero.
5569   TARGET is just a suggestion; callers must assume that
5570   the rtx returned may not be the same as TARGET.
5571
5572   If TARGET is CONST0_RTX, it means that the value will be ignored.
5573
5574   If TMODE is not VOIDmode, it suggests generating the
5575   result in mode TMODE.  But this is done only when convenient.
5576   Otherwise, TMODE is ignored and the value is generated in its natural mode.
5577   TMODE is just a suggestion; callers must assume that
5578   the rtx returned may not have mode TMODE.
5579
5580   Note that TARGET may have neither TMODE nor MODE.  In that case, it
5581   probably will not be used.
5582
5583   If MODIFIER is EXPAND_SUM then when EXP is an addition
5584   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5585   or a nest of (PLUS ...) and (MINUS ...) where the terms are
5586   products as above, or REG or MEM, or constant.
5587   Ordinarily in such cases we would output mul or add instructions
5588   and then return a pseudo reg containing the sum.
5589
5590   EXPAND_INITIALIZER is much like EXPAND_SUM except that
5591   it also marks a label as absolutely required (it can't be dead).
5592   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5593   This is used for outputting expressions used in initializers.
5594
5595   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5596   with a constant address even if that address is not normally legitimate.
5597   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
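
/* (An illustrative example of the EXPAND_SUM convention, with made-up
   RTL: expanding `a[i]' under EXPAND_SUM may return
	(plus:SI (mult:SI (reg:SI 100) (const_int 4))
		 (symbol_ref:SI "a"))
   without emitting the address arithmetic, leaving the caller --
   typically memory_address -- to decide how to legitimize it.)  */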
5598
5599rtx
5600expand_expr (exp, target, tmode, modifier)
5601     register tree exp;
5602     rtx target;
5603     enum machine_mode tmode;
5604     enum expand_modifier modifier;
5605{
5606  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5607     This is static so it will be accessible to our recursive callees.  */
5608  static tree placeholder_list = 0;
5609  register rtx op0, op1, temp;
5610  tree type = TREE_TYPE (exp);
5611  int unsignedp = TREE_UNSIGNED (type);
5612  register enum machine_mode mode;
5613  register enum tree_code code = TREE_CODE (exp);
5614  optab this_optab;
5615  rtx subtarget, original_target;
5616  int ignore;
5617  tree context;
5618  /* Used by check-memory-usage to make modifier read only.  */
5619  enum expand_modifier ro_modifier;
5620
5621  /* Handle ERROR_MARK before anybody tries to access its type. */
5622  if (TREE_CODE (exp) == ERROR_MARK)
5623    {
5624      op0 = CONST0_RTX (tmode);
5625      if (op0 != 0)
5626	return op0;
5627      return const0_rtx;
5628    }
5629
5630  mode = TYPE_MODE (type);
5631  /* Use subtarget as the target for operand 0 of a binary operation.  */
5632  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5633  original_target = target;
5634  ignore = (target == const0_rtx
5635	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5636		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5637		 || code == COND_EXPR)
5638		&& TREE_CODE (type) == VOID_TYPE));
5639
5640  /* Make a read-only version of the modifier.  */
5641  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5642      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5643    ro_modifier = modifier;
5644  else
5645    ro_modifier = EXPAND_NORMAL;
5646
5647  /* Don't use hard regs as subtargets, because the combiner
5648     can only handle pseudo regs.  */
5649  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5650    subtarget = 0;
5651  /* Avoid subtargets inside loops,
5652     since they hide some invariant expressions.  */
5653  if (preserve_subexpressions_p ())
5654    subtarget = 0;
5655
5656  /* If we are going to ignore this result, we need only do something
5657     if there is a side-effect somewhere in the expression.  If there
5658     is, short-circuit the most common cases here.  Note that we must
5659     not call expand_expr with anything but const0_rtx in case this
5660     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
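
  /* (E.g., illustratively: for a discarded `f () + g ()', the class-'2'
     case below expands each operand with const0_rtx as the target,
     purely for the calls' side effects, and returns const0_rtx.)  */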
5661
5662  if (ignore)
5663    {
5664      if (! TREE_SIDE_EFFECTS (exp))
5665	return const0_rtx;
5666
5667      /* Ensure we reference a volatile object even if value is ignored.  */
5668      if (TREE_THIS_VOLATILE (exp)
5669	  && TREE_CODE (exp) != FUNCTION_DECL
5670	  && mode != VOIDmode && mode != BLKmode)
5671	{
5672	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5673	  if (GET_CODE (temp) == MEM)
5674	    temp = copy_to_reg (temp);
5675	  return const0_rtx;
5676	}
5677
5678      if (TREE_CODE_CLASS (code) == '1')
5679	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5680			    VOIDmode, ro_modifier);
5681      else if (TREE_CODE_CLASS (code) == '2'
5682	       || TREE_CODE_CLASS (code) == '<')
5683	{
5684	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5685	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5686	  return const0_rtx;
5687	}
5688      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5689	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5690	/* If the second operand has no side effects, just evaluate
5691	   the first.  */
5692	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5693			    VOIDmode, ro_modifier);
5694
5695      target = 0;
5696    }
5697
5698#ifdef MAX_INTEGER_COMPUTATION_MODE
5699  if (target
5700      && TREE_CODE (exp) != INTEGER_CST
5701      && TREE_CODE (exp) != PARM_DECL
5702      && TREE_CODE (exp) != ARRAY_REF
5703      && TREE_CODE (exp) != COMPONENT_REF
5704      && TREE_CODE (exp) != BIT_FIELD_REF
5705      && TREE_CODE (exp) != INDIRECT_REF
5706      && TREE_CODE (exp) != CALL_EXPR
5707      && TREE_CODE (exp) != VAR_DECL)
5708    {
5709      enum machine_mode mode = GET_MODE (target);
5710
5711      if (GET_MODE_CLASS (mode) == MODE_INT
5712	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5713	fatal ("unsupported wide integer operation");
5714    }
5715
5716  if (TREE_CODE (exp) != INTEGER_CST
5717      && TREE_CODE (exp) != PARM_DECL
5718      && TREE_CODE (exp) != ARRAY_REF
5719      && TREE_CODE (exp) != COMPONENT_REF
5720      && TREE_CODE (exp) != BIT_FIELD_REF
5721      && TREE_CODE (exp) != INDIRECT_REF
5722      && TREE_CODE (exp) != VAR_DECL
5723      && TREE_CODE (exp) != CALL_EXPR
5724      && GET_MODE_CLASS (tmode) == MODE_INT
5725      && tmode > MAX_INTEGER_COMPUTATION_MODE)
5726    fatal ("unsupported wide integer operation");
5727
5728  check_max_integer_computation_mode (exp);
5729#endif
5730
5731  /* If we will do cse, generate all results into pseudo registers
5732     since 1) that allows cse to find more things
5733     and 2) otherwise cse could produce an insn the machine
5734     cannot support.  */
5735
5736  if (! cse_not_expected && mode != BLKmode && target
5737      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5738    target = subtarget;
5739
5740  switch (code)
5741    {
5742    case LABEL_DECL:
5743      {
5744	tree function = decl_function_context (exp);
5745	/* Handle using a label in a containing function.  */
5746	if (function != current_function_decl
5747	    && function != inline_function_decl && function != 0)
5748	  {
5749	    struct function *p = find_function_data (function);
5750	    /* Allocate in the memory associated with the function
5751	       that the label is in.  */
5752	    push_obstacks (p->function_obstack,
5753			   p->function_maybepermanent_obstack);
5754
5755	    p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5756						  label_rtx (exp),
5757						  p->forced_labels);
5758	    pop_obstacks ();
5759	  }
5760	else
5761	  {
5762	    if (modifier == EXPAND_INITIALIZER)
5763	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5764						 label_rtx (exp),
5765						 forced_labels);
5766	  }
5767	temp = gen_rtx_MEM (FUNCTION_MODE,
5768			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5769	if (function != current_function_decl
5770	    && function != inline_function_decl && function != 0)
5771	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5772	return temp;
5773      }
5774
5775    case PARM_DECL:
5776      if (DECL_RTL (exp) == 0)
5777	{
5778	  error_with_decl (exp, "prior parameter's size depends on `%s'");
5779	  return CONST0_RTX (mode);
5780	}
5781
5782      /* ... fall through ...  */
5783
5784    case VAR_DECL:
5785      /* If a static var's type was incomplete when the decl was written,
5786	 but the type is complete now, lay out the decl now.  */
5787      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5788	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5789	{
5790	  push_obstacks_nochange ();
5791	  end_temporary_allocation ();
5792	  layout_decl (exp, 0);
5793	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5794	  pop_obstacks ();
5795	}
5796
5797      /* Although static-storage variables start off initialized, according to
5798	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
5799	 we check them too.  This also lets us check for read-only variables
5800	 accessed via a non-const declaration, in case it won't be detected
5801	 any other way (e.g., in an embedded system or OS kernel without
5802	 memory protection).
5803
5804	 Aggregates are not checked here; they're handled elsewhere.  */
5805      if (current_function_check_memory_usage && code == VAR_DECL
5806	  && GET_CODE (DECL_RTL (exp)) == MEM
5807	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5808	{
5809	  enum memory_use_mode memory_usage;
5810	  memory_usage = get_memory_usage_from_modifier (modifier);
5811
5812	  if (memory_usage != MEMORY_USE_DONT)
5813	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5814			       XEXP (DECL_RTL (exp), 0), Pmode,
5815			       GEN_INT (int_size_in_bytes (type)),
5816			       TYPE_MODE (sizetype),
5817			       GEN_INT (memory_usage),
5818			       TYPE_MODE (integer_type_node));
5819	}
5820
5821      /* ... fall through ...  */
5822
5823    case FUNCTION_DECL:
5824    case RESULT_DECL:
5825      if (DECL_RTL (exp) == 0)
5826	abort ();
5827
5828      /* Ensure the variable is marked as used even if it doesn't go through
5829	 a parser.  If it hasn't been used yet, write out an external
5830	 definition.  */
5831      if (! TREE_USED (exp))
5832	{
5833	  assemble_external (exp);
5834	  TREE_USED (exp) = 1;
5835	}
5836
5837      /* Show we haven't gotten RTL for this yet.  */
5838      temp = 0;
5839
5840      /* Handle variables inherited from containing functions.  */
5841      context = decl_function_context (exp);
5842
5843      /* We treat inline_function_decl as an alias for the current function
5844	 because that is the inline function whose vars, types, etc.
5845	 are being merged into the current function.
5846	 See expand_inline_function.  */
5847
5848      if (context != 0 && context != current_function_decl
5849	  && context != inline_function_decl
5850	  /* If var is static, we don't need a static chain to access it.  */
5851	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
5852		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5853	{
5854	  rtx addr;
5855
5856	  /* Mark as non-local and addressable.  */
5857	  DECL_NONLOCAL (exp) = 1;
5858	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
5859	    abort ();
5860	  mark_addressable (exp);
5861	  if (GET_CODE (DECL_RTL (exp)) != MEM)
5862	    abort ();
5863	  addr = XEXP (DECL_RTL (exp), 0);
5864	  if (GET_CODE (addr) == MEM)
5865	    addr = gen_rtx_MEM (Pmode,
5866				fix_lexical_addr (XEXP (addr, 0), exp));
5867	  else
5868	    addr = fix_lexical_addr (addr, exp);
5869	  temp = change_address (DECL_RTL (exp), mode, addr);
5870	}
5871
5872      /* This is the case of an array whose size is to be determined
5873	 from its initializer, while the initializer is still being parsed.
5874	 See expand_decl.  */
5875
5876      else if (GET_CODE (DECL_RTL (exp)) == MEM
5877	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5878	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5879			       XEXP (DECL_RTL (exp), 0));
5880
5881      /* If DECL_RTL is memory, we are in the normal case and either
5882	 the address is not valid or it is not a register and -fforce-addr
5883	 is specified, get the address into a register.  */
5884
5885      else if (GET_CODE (DECL_RTL (exp)) == MEM
5886	       && modifier != EXPAND_CONST_ADDRESS
5887	       && modifier != EXPAND_SUM
5888	       && modifier != EXPAND_INITIALIZER
5889	       && (! memory_address_p (DECL_MODE (exp),
5890				       XEXP (DECL_RTL (exp), 0))
5891		   || (flag_force_addr
5892		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5893	temp = change_address (DECL_RTL (exp), VOIDmode,
5894			       copy_rtx (XEXP (DECL_RTL (exp), 0)));
5895
5896      /* If we got something, return it.  But first, set the alignment
5897	 if the address is a register.  */
5898      if (temp != 0)
5899	{
5900	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5901	    mark_reg_pointer (XEXP (temp, 0),
5902			      DECL_ALIGN (exp) / BITS_PER_UNIT);
5903
5904	  return temp;
5905	}
5906
5907      /* If the mode of DECL_RTL does not match that of the decl, it
5908	 must be a promoted value.  We return a SUBREG of the wanted mode,
5909	 but mark it so that we know that it was already extended.  */
5910
5911      if (GET_CODE (DECL_RTL (exp)) == REG
5912	  && GET_MODE (DECL_RTL (exp)) != mode)
5913	{
5914	  /* Get the signedness used for this variable.  Ensure we get the
5915	     same mode we got when the variable was declared.  */
5916	  if (GET_MODE (DECL_RTL (exp))
5917	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5918	    abort ();
5919
5920	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5921	  SUBREG_PROMOTED_VAR_P (temp) = 1;
5922	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5923	  return temp;
5924	}
5925
5926      return DECL_RTL (exp);
5927
5928    case INTEGER_CST:
5929      return immed_double_const (TREE_INT_CST_LOW (exp),
5930				 TREE_INT_CST_HIGH (exp),
5931				 mode);
5932
5933    case CONST_DECL:
5934      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5935      			  EXPAND_MEMORY_USE_BAD);
5936
5937    case REAL_CST:
5938      /* If optimized, generate immediate CONST_DOUBLE
5939	 which will be turned into memory by reload if necessary.
5940
5941	 We used to force a register so that loop.c could see it.  But
5942	 this does not allow gen_* patterns to perform optimizations with
5943	 the constants.  It also produces two insns in cases like "x = 1.0;".
5944	 On most machines, floating-point constants are not permitted in
5945	 many insns, so we'd end up copying it to a register in any case.
5946
5947	 Now, we do the copying in expand_binop, if appropriate.  */
5948      return immed_real_const (exp);
5949
5950    case COMPLEX_CST:
5951    case STRING_CST:
5952      if (! TREE_CST_RTL (exp))
5953	output_constant_def (exp);
5954
5955      /* TREE_CST_RTL probably contains a constant address.
5956	 On RISC machines where a constant address isn't valid,
5957	 make some insns to get that address into a register.  */
5958      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5959	  && modifier != EXPAND_CONST_ADDRESS
5960	  && modifier != EXPAND_INITIALIZER
5961	  && modifier != EXPAND_SUM
5962	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5963	      || (flag_force_addr
5964		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5965	return change_address (TREE_CST_RTL (exp), VOIDmode,
5966			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5967      return TREE_CST_RTL (exp);
5968
5969    case EXPR_WITH_FILE_LOCATION:
5970      {
5971	rtx to_return;
5972	char *saved_input_filename = input_filename;
5973	int saved_lineno = lineno;
5974	input_filename = EXPR_WFL_FILENAME (exp);
5975	lineno = EXPR_WFL_LINENO (exp);
5976	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5977	  emit_line_note (input_filename, lineno);
5978	/* Possibly avoid switching back and forth here.  */
5979	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5980	input_filename = saved_input_filename;
5981	lineno = saved_lineno;
5982	return to_return;
5983      }
5984
5985    case SAVE_EXPR:
5986      context = decl_function_context (exp);
5987
5988      /* If this SAVE_EXPR was at global context, assume we are an
5989	 initialization function and move it into our context.  */
5990      if (context == 0)
5991	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5992
5993      /* We treat inline_function_decl as an alias for the current function
5994	 because that is the inline function whose vars, types, etc.
5995	 are being merged into the current function.
5996	 See expand_inline_function.  */
5997      if (context == current_function_decl || context == inline_function_decl)
5998	context = 0;
5999
6000      /* If this is non-local, handle it.  */
6001      if (context)
6002	{
6003	  /* The following call just exists to abort if the context is
6004	     not of a containing function.  */
6005	  find_function_data (context);
6006
6007	  temp = SAVE_EXPR_RTL (exp);
6008	  if (temp && GET_CODE (temp) == REG)
6009	    {
6010	      put_var_into_stack (exp);
6011	      temp = SAVE_EXPR_RTL (exp);
6012	    }
6013	  if (temp == 0 || GET_CODE (temp) != MEM)
6014	    abort ();
6015	  return change_address (temp, mode,
6016				 fix_lexical_addr (XEXP (temp, 0), exp));
6017	}
6018      if (SAVE_EXPR_RTL (exp) == 0)
6019	{
6020	  if (mode == VOIDmode)
6021	    temp = const0_rtx;
6022	  else
6023	    temp = assign_temp (type, 3, 0, 0);
6024
6025	  SAVE_EXPR_RTL (exp) = temp;
6026	  if (!optimize && GET_CODE (temp) == REG)
6027	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6028						save_expr_regs);
6029
6030	  /* If the mode of TEMP does not match that of the expression, it
6031	     must be a promoted value.  We pass store_expr a SUBREG of the
6032	     wanted mode but mark it so that we know that it was already
6033	     extended.  Note that `unsignedp' was modified above in
6034	     this case.  */
6035
6036	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6037	    {
6038	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6039	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6040	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6041	    }
6042
6043	  if (temp == const0_rtx)
6044	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6045			 EXPAND_MEMORY_USE_BAD);
6046	  else
6047	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6048
6049	  TREE_USED (exp) = 1;
6050	}
6051
6052      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6053	 must be a promoted value.  We return a SUBREG of the wanted mode,
6054	 but mark it so that we know that it was already extended.  */
6055
6056      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6057	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6058	{
6059	  /* Compute the signedness and make the proper SUBREG.  */
6060	  promote_mode (type, mode, &unsignedp, 0);
6061	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6062	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6063	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6064	  return temp;
6065	}
6066
6067      return SAVE_EXPR_RTL (exp);
6068
6069    case UNSAVE_EXPR:
6070      {
6071	rtx temp;
6072	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6073	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6074	return temp;
6075      }
6076
6077    case PLACEHOLDER_EXPR:
6078      {
6079	tree placeholder_expr;
6080
6081	/* If there is an object on the head of the placeholder list,
6082	   see if some object in it is of type TYPE or a pointer to it.  For
6083	   further information, see tree.def.  */
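	/* (Background, stated loosely: PLACEHOLDER_EXPRs arise from
	   self-referential types -- e.g. a record whose field positions
	   depend on the record object itself -- and each entry on the
	   list, pushed by WITH_RECORD_EXPR below, supplies a candidate
	   object to substitute for the placeholder.)  */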
6084	for (placeholder_expr = placeholder_list;
6085	     placeholder_expr != 0;
6086	     placeholder_expr = TREE_CHAIN (placeholder_expr))
6087	  {
6088	    tree need_type = TYPE_MAIN_VARIANT (type);
6089	    tree object = 0;
6090	    tree old_list = placeholder_list;
6091	    tree elt;
6092
6093	    /* Find the outermost reference that is of the type we want.
6094	       If none, see if any object has a type that is a pointer to
6095	       the type we want.  */
6096	    for (elt = TREE_PURPOSE (placeholder_expr);
6097		 elt != 0 && object == 0;
6098		 elt
6099		 = ((TREE_CODE (elt) == COMPOUND_EXPR
6100		     || TREE_CODE (elt) == COND_EXPR)
6101		    ? TREE_OPERAND (elt, 1)
6102		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6103		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6104		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6105		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6106		    ? TREE_OPERAND (elt, 0) : 0))
6107	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6108		object = elt;
6109
6110	    for (elt = TREE_PURPOSE (placeholder_expr);
6111		 elt != 0 && object == 0;
6112		 elt
6113		 = ((TREE_CODE (elt) == COMPOUND_EXPR
6114		     || TREE_CODE (elt) == COND_EXPR)
6115		    ? TREE_OPERAND (elt, 1)
6116		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6117		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6118		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6119		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6120		    ? TREE_OPERAND (elt, 0) : 0))
6121	      if (POINTER_TYPE_P (TREE_TYPE (elt))
6122		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6123		      == need_type))
6124		object = build1 (INDIRECT_REF, need_type, elt);
6125
6126	    if (object != 0)
6127	      {
6128		/* Expand this object skipping the list entries before
6129		   it was found in case it is also a PLACEHOLDER_EXPR.
6130		   In that case, we want to translate it using subsequent
6131		   entries.  */
6132		placeholder_list = TREE_CHAIN (placeholder_expr);
6133		temp = expand_expr (object, original_target, tmode,
6134				    ro_modifier);
6135		placeholder_list = old_list;
6136		return temp;
6137	      }
6138	  }
6139      }
6140
6141      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6142      abort ();
6143
6144    case WITH_RECORD_EXPR:
6145      /* Put the object on the placeholder list, expand our first operand,
6146	 and pop the list.  */
6147      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6148				    placeholder_list);
6149      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6150			    tmode, ro_modifier);
6151      placeholder_list = TREE_CHAIN (placeholder_list);
6152      return target;
6153
6154    case GOTO_EXPR:
6155      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6156	expand_goto (TREE_OPERAND (exp, 0));
6157      else
6158	expand_computed_goto (TREE_OPERAND (exp, 0));
6159      return const0_rtx;
6160
6161    case EXIT_EXPR:
6162      expand_exit_loop_if_false (NULL_PTR,
6163				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6164      return const0_rtx;
6165
6166    case LABELED_BLOCK_EXPR:
6167      if (LABELED_BLOCK_BODY (exp))
6168	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6169      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6170      return const0_rtx;
6171
6172    case EXIT_BLOCK_EXPR:
6173      if (EXIT_BLOCK_RETURN (exp))
6174	sorry ("returned value in block_exit_expr");
6175      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6176      return const0_rtx;
6177
6178    case LOOP_EXPR:
6179      push_temp_slots ();
6180      expand_start_loop (1);
6181      expand_expr_stmt (TREE_OPERAND (exp, 0));
6182      expand_end_loop ();
6183      pop_temp_slots ();
6184
6185      return const0_rtx;
6186
6187    case BIND_EXPR:
6188      {
6189	tree vars = TREE_OPERAND (exp, 0);
6190	int vars_need_expansion = 0;
6191
6192	/* Need to open a binding contour here because
6193	   if there are any cleanups they must be contained here.  */
6194	expand_start_bindings (0);
6195
6196	/* Mark the corresponding BLOCK for output in its proper place.  */
6197	if (TREE_OPERAND (exp, 2) != 0
6198	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6199	  insert_block (TREE_OPERAND (exp, 2));
6200
6201	/* If VARS have not yet been expanded, expand them now.  */
6202	while (vars)
6203	  {
6204	    if (DECL_RTL (vars) == 0)
6205	      {
6206		vars_need_expansion = 1;
6207		expand_decl (vars);
6208	      }
6209	    expand_decl_init (vars);
6210	    vars = TREE_CHAIN (vars);
6211	  }
6212
6213	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6214
6215	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6216
6217	return temp;
6218      }
6219
6220    case RTL_EXPR:
6221      if (RTL_EXPR_SEQUENCE (exp))
6222	{
6223	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6224	    abort ();
6225	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6226	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6227	}
6228      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6229      free_temps_for_rtl_expr (exp);
6230      return RTL_EXPR_RTL (exp);
6231
6232    case CONSTRUCTOR:
6233      /* If we don't need the result, just ensure we evaluate any
6234	 subexpressions.  */
6235      if (ignore)
6236	{
6237	  tree elt;
6238	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6239	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6240	    		 EXPAND_MEMORY_USE_BAD);
6241	  return const0_rtx;
6242	}
6243
6244      /* All elts simple constants => refer to a constant in memory.  But
6245	 if this is a non-BLKmode mode, let it store a field at a time
6246	 since that should make a CONST_INT or CONST_DOUBLE when we
6247	 fold.  Likewise, if we have a target we can use, it is best to
6248	 store directly into the target unless the type is large enough
6249	 that memcpy will be used.  If we are making an initializer and
6250	 all operands are constant, put it in memory as well.  */
6251      else if ((TREE_STATIC (exp)
6252		&& ((mode == BLKmode
6253		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6254		    || TREE_ADDRESSABLE (exp)
6255		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6256			&& (!MOVE_BY_PIECES_P
6257                             (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6258			     TYPE_ALIGN (type) / BITS_PER_UNIT))
6259			&& ! mostly_zeros_p (exp))))
6260	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6261	{
6262	  rtx constructor = output_constant_def (exp);
6263	  if (modifier != EXPAND_CONST_ADDRESS
6264	      && modifier != EXPAND_INITIALIZER
6265	      && modifier != EXPAND_SUM
6266	      && (! memory_address_p (GET_MODE (constructor),
6267				      XEXP (constructor, 0))
6268		  || (flag_force_addr
6269		      && GET_CODE (XEXP (constructor, 0)) != REG)))
6270	    constructor = change_address (constructor, VOIDmode,
6271					  XEXP (constructor, 0));
6272	  return constructor;
6273	}
6274
6275      else
6276	{
6277	  /* Handle calls that pass values in multiple non-contiguous
6278	     locations.  The Irix 6 ABI has examples of this.  */
6279	  if (target == 0 || ! safe_from_p (target, exp, 1)
6280	      || GET_CODE (target) == PARALLEL)
6281	    {
6282	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6283		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6284	      else
6285		target = assign_temp (type, 0, 1, 1);
6286	    }
6287
6288	  if (TREE_READONLY (exp))
6289	    {
6290	      if (GET_CODE (target) == MEM)
6291		target = copy_rtx (target);
6292
6293	      RTX_UNCHANGING_P (target) = 1;
6294	    }
6295
6296	  store_constructor (exp, target, 0);
6297	  return target;
6298	}
6299
6300    case INDIRECT_REF:
6301      {
6302	tree exp1 = TREE_OPERAND (exp, 0);
6303	tree exp2;
6304	tree index;
6305 	tree string = string_constant (exp1, &index);
6306 	int i;
6307
6308	/* Try to optimize reads from const strings.  */
6309 	if (string
6310 	    && TREE_CODE (string) == STRING_CST
6311 	    && TREE_CODE (index) == INTEGER_CST
6312 	    && !TREE_INT_CST_HIGH (index)
6313 	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6314 	    && GET_MODE_CLASS (mode) == MODE_INT
6315 	    && GET_MODE_SIZE (mode) == 1
6316	    && modifier != EXPAND_MEMORY_USE_WO)
6317 	  return GEN_INT (TREE_STRING_POINTER (string)[i]);
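
	/* (E.g., illustratively: for `*("hi" + 1)', the test above finds
	   the STRING_CST and the constant index and returns the CONST_INT
	   for 'i' without emitting any memory reference.)  */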
6318
6319	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6320	op0 = memory_address (mode, op0);
6321
6322	if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6323	  {
6324	    enum memory_use_mode memory_usage;
6325	    memory_usage = get_memory_usage_from_modifier (modifier);
6326
6327            if (memory_usage != MEMORY_USE_DONT)
6328	      {
6329		in_check_memory_usage = 1;
6330		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6331				   op0, Pmode,
6332				   GEN_INT (int_size_in_bytes (type)),
6333				   TYPE_MODE (sizetype),
6334				   GEN_INT (memory_usage),
6335				   TYPE_MODE (integer_type_node));
6336		in_check_memory_usage = 0;
6337	      }
6338	  }
6339
6340	temp = gen_rtx_MEM (mode, op0);
6341
6342	if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6343	    || (TREE_CODE (exp1) == ADDR_EXPR
6344		&& (exp2 = TREE_OPERAND (exp1, 0))
6345		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6346	  MEM_SET_IN_STRUCT_P (temp, 1);
6347
6348	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6349	MEM_ALIAS_SET (temp) = get_alias_set (exp);
6350
6351	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6352	   here, because, in C and C++, the fact that a location is accessed
6353	   through a pointer to const does not mean that the value there can
6354	   never change.  Languages where it can never change should
6355	   also set TREE_STATIC.  */
6356	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6357	return temp;
6358      }
6359
6360    case ARRAY_REF:
6361      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6362	abort ();
6363
6364      {
6365	tree array = TREE_OPERAND (exp, 0);
6366	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6367	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6368	tree index = TREE_OPERAND (exp, 1);
6369	tree index_type = TREE_TYPE (index);
6370	HOST_WIDE_INT i;
6371
6372	/* Optimize the special-case of a zero lower bound.
6373
6374	   We convert the low_bound to sizetype to avoid some problems
6375	   with constant folding.  (E.g. suppose the lower bound is 1,
6376	   and its mode is QI.  Without the conversion,  (ARRAY
6377	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6378	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)
6379
6380	   But sizetype isn't quite right either (especially if
6381	   the lower bound is negative).  FIXME */
6382
6383	if (! integer_zerop (low_bound))
6384	  index = fold (build (MINUS_EXPR, index_type, index,
6385			       convert (sizetype, low_bound)));
6386
6387	/* Fold an expression like: "foo"[2].
6388	   This is not done in fold so it won't happen inside &.
6389	   Don't fold if this is for wide characters since it's too
6390	   difficult to do correctly and this is a very rare case.  */
6391
6392	if (TREE_CODE (array) == STRING_CST
6393	    && TREE_CODE (index) == INTEGER_CST
6394	    && !TREE_INT_CST_HIGH (index)
6395	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6396	    && GET_MODE_CLASS (mode) == MODE_INT
6397	    && GET_MODE_SIZE (mode) == 1)
6398	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
6399
6400	/* If this is a constant index into a constant array,
6401	   just get the value from the array.  Handle both the cases when
6402	   we have an explicit constructor and when our operand is a variable
6403	   that was declared const.  */
6404
6405	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6406	  {
6407	    if (TREE_CODE (index) == INTEGER_CST
6408		&& TREE_INT_CST_HIGH (index) == 0)
6409	      {
6410		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6411
6412		i = TREE_INT_CST_LOW (index);
6413		while (elem && i--)
6414		  elem = TREE_CHAIN (elem);
6415		if (elem)
6416		  return expand_expr (fold (TREE_VALUE (elem)), target,
6417				      tmode, ro_modifier);
6418	      }
6419	  }
6420
6421	else if (optimize >= 1
6422		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6423		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6424		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6425	  {
6426	    if (TREE_CODE (index) == INTEGER_CST)
6427	      {
6428		tree init = DECL_INITIAL (array);
6429
6430		i = TREE_INT_CST_LOW (index);
6431		if (TREE_CODE (init) == CONSTRUCTOR)
6432		  {
6433		    tree elem = CONSTRUCTOR_ELTS (init);
6434
6435		    while (elem
6436			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6437		      elem = TREE_CHAIN (elem);
6438		    if (elem)
6439		      return expand_expr (fold (TREE_VALUE (elem)), target,
6440					  tmode, ro_modifier);
6441		  }
6442		else if (TREE_CODE (init) == STRING_CST
6443			 && TREE_INT_CST_HIGH (index) == 0
6444			 && (TREE_INT_CST_LOW (index)
6445			     < TREE_STRING_LENGTH (init)))
6446		  return (GEN_INT
6447			  (TREE_STRING_POINTER
6448			   (init)[TREE_INT_CST_LOW (index)]));
6449	      }
6450	  }
6451      }
6452
6453      /* ... fall through ... */
6454
6455    case COMPONENT_REF:
6456    case BIT_FIELD_REF:
6457      /* If the operand is a CONSTRUCTOR, we can just extract the
6458	 appropriate field if it is present.  Don't do this if we have
6459	 already written the data since we want to refer to that copy
6460	 and varasm.c assumes that's what we'll do.  */
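      /* Illustrative note (added): for a hypothetical CONSTRUCTOR
	 {5} initializing "struct s { int f : 3; }", the loop below
	 hands back the stored value of F directly; since F is a
	 signed 3-bit field, the value is sign-extended by shifting
	 left and then right by (mode bitsize - 3).  */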
6461      if (code != ARRAY_REF
6462	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6463	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6464	{
6465	  tree elt;
6466
6467	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6468	       elt = TREE_CHAIN (elt))
6469	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6470		/* We can normally use the value of the field in the
6471		   CONSTRUCTOR.  However, if this is a bitfield in
6472		   an integral mode that we can fit in a HOST_WIDE_INT,
6473		   we must mask only the number of bits in the bitfield,
6474		   since this is done implicitly by the constructor.  If
6475		   the bitfield does not meet either of those conditions,
6476		   we can't do this optimization.  */
6477		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6478		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6479			 == MODE_INT)
6480			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6481			    <= HOST_BITS_PER_WIDE_INT))))
6482	      {
6483		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6484		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6485		  {
6486		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6487
6488		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6489		      {
6490			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6491			op0 = expand_and (op0, op1, target);
6492		      }
6493		    else
6494		      {
6495			enum machine_mode imode
6496			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6497			tree count
6498			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6499					 0);
6500
6501			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6502					    target, 0);
6503			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6504					    target, 0);
6505		      }
6506		  }
6507
6508		return op0;
6509	      }
6510	}
6511
6512      {
6513	enum machine_mode mode1;
6514	int bitsize;
6515	int bitpos;
6516	tree offset;
6517	int volatilep = 0;
6518	int alignment;
6519	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6520					&mode1, &unsignedp, &volatilep,
6521					&alignment);
6522
6523	/* If we got back the original object, something is wrong.  Perhaps
6524	   we are evaluating an expression too early.  In any event, don't
6525	   infinitely recurse.  */
6526	if (tem == exp)
6527	  abort ();
6528
6529	/* If TEM's type is a union of variable size, pass TARGET to the inner
6530	   computation, since it will need a temporary and TARGET is known
6531	   to be usable for that.  This occurs in unchecked conversion in Ada.  */
6532
6533	op0 = expand_expr (tem,
6534			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6535			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6536				!= INTEGER_CST)
6537			    ? target : NULL_RTX),
6538			   VOIDmode,
6539			   modifier == EXPAND_INITIALIZER
6540			   ? modifier : EXPAND_NORMAL);
6541
6542	/* If this is a constant, put it into a register if it is a
6543	   legitimate constant and memory if it isn't.  */
6544	if (CONSTANT_P (op0))
6545	  {
6546	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6547	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6548	      op0 = force_reg (mode, op0);
6549	    else
6550	      op0 = validize_mem (force_const_mem (mode, op0));
6551	  }
6552
6553	if (offset != 0)
6554	  {
6555	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6556
6557	    if (GET_CODE (op0) != MEM)
6558	      abort ();
6559
6560	    if (GET_MODE (offset_rtx) != ptr_mode)
6561	      {
6562#ifdef POINTERS_EXTEND_UNSIGNED
6563		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6564#else
6565		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6566#endif
6567	      }
6568
6569	    /* A constant address in OP0 can have VOIDmode; we must not
6570	       call force_reg in that case, so avoid it.  */
6571	    if (GET_CODE (op0) == MEM
6572		&& GET_MODE (op0) == BLKmode
6573		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6574		&& bitsize
6575		&& (bitpos % bitsize) == 0
6576		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6577		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6578	      {
6579		rtx temp = change_address (op0, mode1,
6580					   plus_constant (XEXP (op0, 0),
6581							  (bitpos /
6582							   BITS_PER_UNIT)));
6583		if (GET_CODE (XEXP (temp, 0)) == REG)
6584		  op0 = temp;
6585		else
6586		  op0 = change_address (op0, mode1,
6587					force_reg (GET_MODE (XEXP (temp, 0)),
6588						   XEXP (temp, 0)));
6589		bitpos = 0;
6590	      }
6591
6593	    op0 = change_address (op0, VOIDmode,
6594				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6595						force_reg (ptr_mode, offset_rtx)));
6596	  }
6597
6598	/* Don't forget about volatility even if this is a bitfield.  */
6599	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6600	  {
6601	    op0 = copy_rtx (op0);
6602	    MEM_VOLATILE_P (op0) = 1;
6603	  }
6604
6605	/* Check the access.  */
6606	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6607          {
6608	    enum memory_use_mode memory_usage;
6609	    memory_usage = get_memory_usage_from_modifier (modifier);
6610
6611	    if (memory_usage != MEMORY_USE_DONT)
6612	      {
6613		rtx to;
6614		int size;
6615
6616		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6617		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6618
6619        	/* Check the access right of the pointer.  */
6620		if (size > BITS_PER_UNIT)
6621		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6622				     to, Pmode,
6623				     GEN_INT (size / BITS_PER_UNIT),
6624				     TYPE_MODE (sizetype),
6625				     GEN_INT (memory_usage),
6626				     TYPE_MODE (integer_type_node));
6627	      }
6628	  }
6629
6630	/* In cases where an aligned union has an unaligned object
6631	   as a field, we might be extracting a BLKmode value from
6632	   an integer-mode (e.g., SImode) object.  Handle this case
6633	   by doing the extract into an object as wide as the field
6634	   (which we know to be the width of a basic mode), then
6635	   storing into memory, and changing the mode to BLKmode.
6636	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
6637	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
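	/* Illustrative note (added): e.g. a BLKmode field living inside
	   a union whose chosen mode is SImode: we fetch the field as an
	   SImode bit-field, spill it to a stack temporary, and return
	   that temporary with its mode changed to BLKmode.  */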
6638	if (mode1 == VOIDmode
6639	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6640	    || (modifier != EXPAND_CONST_ADDRESS
6641		&& modifier != EXPAND_INITIALIZER
6642		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
6643		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6644		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6645		    /* If the field isn't aligned enough to fetch as a memref,
6646		       fetch it as a bit field.  */
6647		    || (SLOW_UNALIGNED_ACCESS
6648			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6649			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6650	  {
6651	    enum machine_mode ext_mode = mode;
6652
6653	    if (ext_mode == BLKmode)
6654	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6655
6656	    if (ext_mode == BLKmode)
6657	      {
6658		/* In this case, BITPOS must start at a byte boundary and
6659		   TARGET, if specified, must be a MEM.  */
6660		if (GET_CODE (op0) != MEM
6661		    || (target != 0 && GET_CODE (target) != MEM)
6662		    || bitpos % BITS_PER_UNIT != 0)
6663		  abort ();
6664
6665		op0 = change_address (op0, VOIDmode,
6666				      plus_constant (XEXP (op0, 0),
6667						     bitpos / BITS_PER_UNIT));
6668		if (target == 0)
6669		  target = assign_temp (type, 0, 1, 1);
6670
6671		emit_block_move (target, op0,
6672				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6673					  / BITS_PER_UNIT),
6674				 1);
6675
6676		return target;
6677	      }
6678
6679	    op0 = validize_mem (op0);
6680
6681	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6682	      mark_reg_pointer (XEXP (op0, 0), alignment);
6683
6684	    op0 = extract_bit_field (op0, bitsize, bitpos,
6685				     unsignedp, target, ext_mode, ext_mode,
6686				     alignment,
6687				     int_size_in_bytes (TREE_TYPE (tem)));
6688
6689	    /* If the result is a record type and BITSIZE is narrower than
6690	       the mode of OP0, an integral mode, and this is a big endian
6691	       machine, we must put the field into the high-order bits.  */
6692	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6693		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6694		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6695	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6696				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6697					    - bitsize),
6698				  op0, 1);
6699
6700	    if (mode == BLKmode)
6701	      {
6702		rtx new = assign_stack_temp (ext_mode,
6703					     bitsize / BITS_PER_UNIT, 0);
6704
6705		emit_move_insn (new, op0);
6706		op0 = copy_rtx (new);
6707		PUT_MODE (op0, BLKmode);
6708		MEM_SET_IN_STRUCT_P (op0, 1);
6709	      }
6710
6711	    return op0;
6712	  }
6713
6714	/* If the result is BLKmode, use that to access the object
6715	   now as well.  */
6716	if (mode == BLKmode)
6717	  mode1 = BLKmode;
6718
6719	/* Get a reference to just this component.  */
6720	if (modifier == EXPAND_CONST_ADDRESS
6721	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6722	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6723						   (bitpos / BITS_PER_UNIT)));
6724	else
6725	  op0 = change_address (op0, mode1,
6726				plus_constant (XEXP (op0, 0),
6727					       (bitpos / BITS_PER_UNIT)));
6728
6729	if (GET_CODE (op0) == MEM)
6730	  MEM_ALIAS_SET (op0) = get_alias_set (exp);
6731
6732	if (GET_CODE (XEXP (op0, 0)) == REG)
6733	  mark_reg_pointer (XEXP (op0, 0), alignment);
6734
6735	MEM_SET_IN_STRUCT_P (op0, 1);
6736	MEM_VOLATILE_P (op0) |= volatilep;
6737	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6738	    || modifier == EXPAND_CONST_ADDRESS
6739	    || modifier == EXPAND_INITIALIZER)
6740	  return op0;
6741	else if (target == 0)
6742	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6743
6744	convert_move (target, op0, unsignedp);
6745	return target;
6746      }
6747
6748      /* Intended for a reference to a buffer of a file-object in Pascal.
6749	 But it's not certain that a special tree code will really be
6750	 necessary for these.  INDIRECT_REF might work for them.  */
6751    case BUFFER_REF:
6752      abort ();
6753
6754    case IN_EXPR:
6755      {
6756	/* Pascal set IN expression.
6757
6758	   Algorithm:
6759	       rlo       = set_low - (set_low%bits_per_word);
6760	       the_word  = set [ (index - rlo)/bits_per_word ];
6761	       bit_index = index % bits_per_word;
6762	       bitmask   = 1 << bit_index;
6763	       return !!(the_word & bitmask);  */
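	/* Worked example (added): with bits_per_word == 8, set_low == 3
	   and index == 13:  rlo = 3 - 3%8 = 0, the_word = set[(13-0)/8]
	   = set[1], bit_index = 13%8 = 5, bitmask = 1<<5; the result is
	   nonzero iff bit 5 of the second byte of the set is on.  */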
6764
6765	tree set = TREE_OPERAND (exp, 0);
6766	tree index = TREE_OPERAND (exp, 1);
6767	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6768	tree set_type = TREE_TYPE (set);
6769	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6770	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6771	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6772	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6773	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6774	rtx setval = expand_expr (set, 0, VOIDmode, 0);
6775	rtx setaddr = XEXP (setval, 0);
6776	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6777	rtx rlow;
6778	rtx diff, quo, rem, addr, bit, result;
6779
6780	preexpand_calls (exp);
6781
6782	/* If domain is empty, answer is no.  Likewise if index is constant
6783	   and out of bounds.  */
6784	if (((TREE_CODE (set_high_bound) == INTEGER_CST
6785	     && TREE_CODE (set_low_bound) == INTEGER_CST
6786	     && tree_int_cst_lt (set_high_bound, set_low_bound))
6787	     || (TREE_CODE (index) == INTEGER_CST
6788		 && TREE_CODE (set_low_bound) == INTEGER_CST
6789		 && tree_int_cst_lt (index, set_low_bound))
6790	     || (TREE_CODE (set_high_bound) == INTEGER_CST
6791		 && TREE_CODE (index) == INTEGER_CST
6792		 && tree_int_cst_lt (set_high_bound, index))))
6793	  return const0_rtx;
6794
6795	if (target == 0)
6796	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6797
6798	/* If we get here, we have to generate the code for both cases
6799	   (in range and out of range).  */
6800
6801	op0 = gen_label_rtx ();
6802	op1 = gen_label_rtx ();
6803
6804	if (! (GET_CODE (index_val) == CONST_INT
6805	       && GET_CODE (lo_r) == CONST_INT))
6806	  {
6807	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6808				     GET_MODE (index_val), iunsignedp, 0, op1);
6809	  }
6810
6811	if (! (GET_CODE (index_val) == CONST_INT
6812	       && GET_CODE (hi_r) == CONST_INT))
6813	  {
6814	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6815				     GET_MODE (index_val), iunsignedp, 0, op1);
6816	  }
6817
6818	/* Calculate the element number of bit zero in the first word
6819	   of the set.  */
6820	if (GET_CODE (lo_r) == CONST_INT)
6821	  rlow = GEN_INT (INTVAL (lo_r)
6822			  & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
6823	else
6824	  rlow = expand_binop (index_mode, and_optab, lo_r,
6825			       GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
6826			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6827
6828	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6829			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6830
6831	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6832			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6833	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6834			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6835
6836	addr = memory_address (byte_mode,
6837			       expand_binop (index_mode, add_optab, diff,
6838					     setaddr, NULL_RTX, iunsignedp,
6839					     OPTAB_LIB_WIDEN));
6840
6841	/* Extract the bit we want to examine.  */
6842	bit = expand_shift (RSHIFT_EXPR, byte_mode,
6843			    gen_rtx_MEM (byte_mode, addr),
6844			    make_tree (TREE_TYPE (index), rem),
6845			    NULL_RTX, 1);
6846	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6847			       GET_MODE (target) == byte_mode ? target : 0,
6848			       1, OPTAB_LIB_WIDEN);
6849
6850	if (result != target)
6851	  convert_move (target, result, 1);
6852
6853	/* Output the code to handle the out-of-range case.  */
6854	emit_jump (op0);
6855	emit_label (op1);
6856	emit_move_insn (target, const0_rtx);
6857	emit_label (op0);
6858	return target;
6859      }
6860
6861    case WITH_CLEANUP_EXPR:
6862      if (RTL_EXPR_RTL (exp) == 0)
6863	{
6864	  RTL_EXPR_RTL (exp)
6865	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6866	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6867
6868	  /* That's it for this cleanup.  */
6869	  TREE_OPERAND (exp, 2) = 0;
6870	}
6871      return RTL_EXPR_RTL (exp);
6872
6873    case CLEANUP_POINT_EXPR:
6874      {
6875	/* Start a new binding layer that will keep track of all cleanup
6876	   actions to be performed.  */
6877	expand_start_bindings (0);
6878
6879	target_temp_slot_level = temp_slot_level;
6880
6881	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6882	/* If we're going to use this value, load it up now.  */
6883	if (! ignore)
6884	  op0 = force_not_mem (op0);
6885	preserve_temp_slots (op0);
6886	expand_end_bindings (NULL_TREE, 0, 0);
6887      }
6888      return op0;
6889
6890    case CALL_EXPR:
6891      /* Check for a built-in function.  */
6892      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6893	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6894	      == FUNCTION_DECL)
6895	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6896	return expand_builtin (exp, target, subtarget, tmode, ignore);
6897
6898      /* If this call was expanded already by preexpand_calls,
6899	 just return the result we got.  */
6900      if (CALL_EXPR_RTL (exp) != 0)
6901	return CALL_EXPR_RTL (exp);
6902
6903      return expand_call (exp, target, ignore);
6904
6905    case NON_LVALUE_EXPR:
6906    case NOP_EXPR:
6907    case CONVERT_EXPR:
6908    case REFERENCE_EXPR:
6909      if (TREE_CODE (type) == UNION_TYPE)
6910	{
6911	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6912	  if (target == 0)
6913	    {
6914	      if (mode != BLKmode)
6915		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6916	      else
6917		target = assign_temp (type, 0, 1, 1);
6918	    }
6919
6920	  if (GET_CODE (target) == MEM)
6921	    /* Store data into beginning of memory target.  */
6922	    store_expr (TREE_OPERAND (exp, 0),
6923			change_address (target, TYPE_MODE (valtype), 0), 0);
6924
6925	  else if (GET_CODE (target) == REG)
6926	    /* Store this field into a union of the proper type.  */
6927	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6928			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6929			 VOIDmode, 0, 1,
6930			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6931			 0);
6932	  else
6933	    abort ();
6934
6935	  /* Return the entire union.  */
6936	  return target;
6937	}
6938
6939      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6940	{
6941	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6942			     ro_modifier);
6943
6944	  /* If the signedness of the conversion differs and OP0 is
6945	     a promoted SUBREG, clear that indication since we now
6946	     have to do the proper extension.  */
6947	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6948	      && GET_CODE (op0) == SUBREG)
6949	    SUBREG_PROMOTED_VAR_P (op0) = 0;
6950
6951	  return op0;
6952	}
6953
6954      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6955      if (GET_MODE (op0) == mode)
6956	return op0;
6957
6958      /* If OP0 is a constant, just convert it into the proper mode.  */
6959      if (CONSTANT_P (op0))
6960	return
6961	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6962			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6963
6964      if (modifier == EXPAND_INITIALIZER)
6965	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6966
6967      if (target == 0)
6968	return
6969	  convert_to_mode (mode, op0,
6970			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6971      else
6972	convert_move (target, op0,
6973		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6974      return target;
6975
6976    case PLUS_EXPR:
6977      /* We come here from MINUS_EXPR when the second operand is a
6978         constant.  */
6979    plus_expr:
6980      this_optab = add_optab;
6981
6982      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6983	 something else, make sure we add the register to the constant and
6984	 then to the other thing.  This case can occur during strength
6985	 reduction and doing it this way will produce better code if the
6986	 frame pointer or argument pointer is eliminated.
6987
6988	 fold-const.c will ensure that the constant is always in the inner
6989	 PLUS_EXPR, so the only case we need to do anything about is if
6990	 sp, ap, or fp is our second argument, in which case we must swap
6991	 the innermost first argument and our second argument.  */
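      /* Illustrative note (added): i.e. (X + C) + FP is rewritten as
	 (FP + C) + X, so that if FP is later eliminated, FP + C folds
	 into a single constant displacement instead of leaving the
	 pointer buried under X in the inner sum.  */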
6992
6993      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6994	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6995	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6996	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6997	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6998	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6999	{
7000	  tree t = TREE_OPERAND (exp, 1);
7001
7002	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7003	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7004	}
7005
7006      /* If the result is to be ptr_mode and we are adding an integer to
7007	 something, we might be forming a constant.  So try to use
7008	 plus_constant.  If it produces a sum and we can't accept it,
7009	 use force_operand.  This allows P = &ARR[const] to generate
7010	 efficient code on machines where a SYMBOL_REF is not a valid
7011	 address.
7012
7013	 If this is an EXPAND_SUM call, always return the sum.  */
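      /* Illustrative note (added): for P = &ARR[3] with 4-byte
	 elements, plus_constant can fold the whole address to
	 (const (plus (symbol_ref ARR) (const_int 12))), so no add
	 instruction is needed even if SYMBOL_REF alone is not a
	 valid address.  */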
7014      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7015	  || mode == ptr_mode)
7016	{
7017	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7018	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7019	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7020	    {
7021	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7022				 EXPAND_SUM);
7023	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
7024	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7025		op1 = force_operand (op1, target);
7026	      return op1;
7027	    }
7028
7029	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7030		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7031		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7032	    {
7033	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7034				 EXPAND_SUM);
7035	      if (! CONSTANT_P (op0))
7036		{
7037		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7038				     VOIDmode, modifier);
7039		  /* Don't go to both_summands if modifier
7040		     says it's not right to return a PLUS.  */
7041		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7042		    goto binop2;
7043		  goto both_summands;
7044		}
7045	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
7046	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7047		op0 = force_operand (op0, target);
7048	      return op0;
7049	    }
7050	}
7051
7052      /* No sense saving up arithmetic to be done
7053	 if it's all in the wrong mode to form part of an address.
7054	 And force_operand won't know whether to sign-extend or
7055	 zero-extend.  */
7056      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7057	  || mode != ptr_mode)
7058	goto binop;
7059
7060      preexpand_calls (exp);
7061      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7062	subtarget = 0;
7063
7064      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7065      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7066
7067    both_summands:
7068      /* Make sure any term that's a sum with a constant comes last.  */
7069      if (GET_CODE (op0) == PLUS
7070	  && CONSTANT_P (XEXP (op0, 1)))
7071	{
7072	  temp = op0;
7073	  op0 = op1;
7074	  op1 = temp;
7075	}
7076      /* If adding to a sum including a constant,
7077	 associate it to put the constant outside.  */
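      /* Illustrative note (added): e.g. OP0 = A and OP1 = (B + 4):
	 we rewrite this as (A + B) + 4 so the constant ends up
	 outermost, where combination and address selection can use it.  */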
7078      if (GET_CODE (op1) == PLUS
7079	  && CONSTANT_P (XEXP (op1, 1)))
7080	{
7081	  rtx constant_term = const0_rtx;
7082
7083	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7084	  if (temp != 0)
7085	    op0 = temp;
7086	  /* Ensure that MULT comes first if there is one.  */
7087	  else if (GET_CODE (op0) == MULT)
7088	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7089	  else
7090	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7091
7092	  /* Let's also eliminate constants from op0 if possible.  */
7093	  op0 = eliminate_constant_term (op0, &constant_term);
7094
7095	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7096	     their sum should be a constant.  Form it into OP1, since the
7097	     result we want will then be OP0 + OP1.  */
7098
7099	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7100					    XEXP (op1, 1));
7101	  if (temp != 0)
7102	    op1 = temp;
7103	  else
7104	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7105	}
7106
7107      /* Put a constant term last and put a multiplication first.  */
7108      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7109	temp = op1, op1 = op0, op0 = temp;
7110
7111      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7112      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7113
7114    case MINUS_EXPR:
7115      /* For initializers, we are allowed to return a MINUS of two
7116	 symbolic constants.  Here we handle all cases when both
7117	 operands are constant.  */
7120      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7121	  && really_constant_p (TREE_OPERAND (exp, 0))
7122	  && really_constant_p (TREE_OPERAND (exp, 1)))
7123	{
7124	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7125				 VOIDmode, ro_modifier);
7126	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7127				 VOIDmode, ro_modifier);
7128
7129	  /* If the last operand is a CONST_INT, use plus_constant of
7130	     the negated constant.  Else make the MINUS.  */
7131	  if (GET_CODE (op1) == CONST_INT)
7132	    return plus_constant (op0, - INTVAL (op1));
7133	  else
7134	    return gen_rtx_MINUS (mode, op0, op1);
7135	}
7136      /* Convert A - const to A + (-const).  */
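      /* Illustrative note (added): e.g. A - 7 becomes A + (-7).  When
	 TYPE is unsigned or the negation overflows (e.g. for the most
	 negative value), the rewrite is redone below in the
	 corresponding signed type, where the negated constant is
	 representable.  */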
7137      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7138	{
7139	  tree negated = fold (build1 (NEGATE_EXPR, type,
7140				       TREE_OPERAND (exp, 1)));
7141
7142	  /* Deal with the case where we can't negate the constant
7143	     in TYPE.  */
7144	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7145	    {
7146	      tree newtype = signed_type (type);
7147	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7148	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7149	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7150
7151	      if (! TREE_OVERFLOW (newneg))
7152		return expand_expr (convert (type,
7153					     build (PLUS_EXPR, newtype,
7154						    newop0, newneg)),
7155				    target, tmode, ro_modifier);
7156	    }
7157	  else
7158	    {
7159	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7160	      goto plus_expr;
7161	    }
7162	}
7163      this_optab = sub_optab;
7164      goto binop;
7165
7166    case MULT_EXPR:
7167      preexpand_calls (exp);
7168      /* If first operand is constant, swap them.
7169	 Thus the following special case checks need only
7170	 check the second operand.  */
7171      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7172	{
7173	  register tree t1 = TREE_OPERAND (exp, 0);
7174	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7175	  TREE_OPERAND (exp, 1) = t1;
7176	}
7177
7178      /* Attempt to return something suitable for generating an
7179	 indexed address, for machines that support that.  */
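      /* Illustrative note (added): e.g. (I + 2) * 4 in an address
	 context expands, via the distributive law below, to
	 (plus (mult I 4) (const_int 8)), a form that machines with
	 indexed addressing can use directly.  */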
7180
7181      if (modifier == EXPAND_SUM && mode == ptr_mode
7182	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7183	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7184	{
7185	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7186			     EXPAND_SUM);
7187
7188	  /* Apply distributive law if OP0 is x+c.  */
7189	  if (GET_CODE (op0) == PLUS
7190	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7191	    return gen_rtx_PLUS (mode,
7192				 gen_rtx_MULT (mode, XEXP (op0, 0),
7193					       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7194			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7195				     * INTVAL (XEXP (op0, 1))));
7196
7197	  if (GET_CODE (op0) != REG)
7198	    op0 = force_operand (op0, NULL_RTX);
7199	  if (GET_CODE (op0) != REG)
7200	    op0 = copy_to_mode_reg (mode, op0);
7201
7202	  return gen_rtx_MULT (mode, op0,
7203			       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7204	}
7205
7206      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7207	subtarget = 0;
7208
7209      /* Check for multiplying things that have been extended
7210	 from a narrower type.  If this machine supports multiplying
7211	 in that narrower type with a result in the desired type,
7212	 do it that way, and avoid the explicit type-conversion.  */
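      /* Illustrative note (added): e.g. on a machine with a widening
	 16x16->32 multiply pattern (such as mulhisi3), (int) S1 *
	 (int) S2 with S1, S2 of type short is done directly on the
	 HImode inputs, skipping both explicit sign extensions.  */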
7213      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7214	  && TREE_CODE (type) == INTEGER_TYPE
7215	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7216	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7217	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7218	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7219				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7220	       /* Don't use a widening multiply if a shift will do.  */
7221	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7222		    > HOST_BITS_PER_WIDE_INT)
7223		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7224	      ||
7225	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7226	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7227		   ==
7228		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7229	       /* If both operands are extended, they must either both
7230		  be zero-extended or both be sign-extended.  */
7231	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7232		   ==
7233		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7234	{
7235	  enum machine_mode innermode
7236	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7237	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7238			? smul_widen_optab : umul_widen_optab);
7239	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7240			? umul_widen_optab : smul_widen_optab);
7241	  if (mode == GET_MODE_WIDER_MODE (innermode))
7242	    {
7243	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7244		{
7245		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7246				     NULL_RTX, VOIDmode, 0);
7247		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7248		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7249				       VOIDmode, 0);
7250		  else
7251		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7252				       NULL_RTX, VOIDmode, 0);
7253		  goto binop2;
7254		}
7255	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7256		       && innermode == word_mode)
7257		{
7258		  rtx htem;
7259		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7260				     NULL_RTX, VOIDmode, 0);
7261		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7262		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7263				       VOIDmode, 0);
7264		  else
7265		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7266				       NULL_RTX, VOIDmode, 0);
7267		  temp = expand_binop (mode, other_optab, op0, op1, target,
7268				       unsignedp, OPTAB_LIB_WIDEN);
7269		  htem = expand_mult_highpart_adjust (innermode,
7270						      gen_highpart (innermode, temp),
7271						      op0, op1,
7272						      gen_highpart (innermode, temp),
7273						      unsignedp);
7274		  emit_move_insn (gen_highpart (innermode, temp), htem);
7275		  return temp;
7276		}
7277	    }
7278	}
7279      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7280      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7281      return expand_mult (mode, op0, op1, target, unsignedp);
7282
7283    case TRUNC_DIV_EXPR:
7284    case FLOOR_DIV_EXPR:
7285    case CEIL_DIV_EXPR:
7286    case ROUND_DIV_EXPR:
7287    case EXACT_DIV_EXPR:
7288      preexpand_calls (exp);
7289      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7290	subtarget = 0;
7291      /* Possible optimization: compute the dividend with EXPAND_SUM;
7292	 then, if the divisor is constant, we could optimize the case
7293	 where some terms of the dividend have coefficients divisible by it.  */
7294      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7295      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7296      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7297
7298    case RDIV_EXPR:
7299      this_optab = flodiv_optab;
7300      goto binop;
7301
7302    case TRUNC_MOD_EXPR:
7303    case FLOOR_MOD_EXPR:
7304    case CEIL_MOD_EXPR:
7305    case ROUND_MOD_EXPR:
7306      preexpand_calls (exp);
7307      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7308	subtarget = 0;
7309      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7310      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7311      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7312
7313    case FIX_ROUND_EXPR:
7314    case FIX_FLOOR_EXPR:
7315    case FIX_CEIL_EXPR:
7316      abort ();			/* Not used for C.  */
7317
7318    case FIX_TRUNC_EXPR:
7319      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7320      if (target == 0)
7321	target = gen_reg_rtx (mode);
7322      expand_fix (target, op0, unsignedp);
7323      return target;
7324
7325    case FLOAT_EXPR:
7326      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7327      if (target == 0)
7328	target = gen_reg_rtx (mode);
7329      /* expand_float can't figure out what to do if FROM has VOIDmode.
7330	 So give it the correct mode.  With -O, cse will optimize this.  */
7331      if (GET_MODE (op0) == VOIDmode)
7332	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7333				op0);
7334      expand_float (target, op0,
7335		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7336      return target;
7337
7338    case NEGATE_EXPR:
7339      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7340      temp = expand_unop (mode, neg_optab, op0, target, 0);
7341      if (temp == 0)
7342	abort ();
7343      return temp;
7344
7345    case ABS_EXPR:
7346      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7347
7348      /* Handle complex values specially.  */
7349      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7350	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7351	return expand_complex_abs (mode, op0, target, unsignedp);
7352
7353      /* Unsigned abs is simply the operand.  Testing here means we don't
7354	 risk generating incorrect code below.  */
7355      if (TREE_UNSIGNED (type))
7356	return op0;
7357
7358      return expand_abs (mode, op0, target,
7359			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7360
7361    case MAX_EXPR:
7362    case MIN_EXPR:
7363      target = original_target;
7364      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7365	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7366	  || GET_MODE (target) != mode
7367	  || (GET_CODE (target) == REG
7368	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7369	target = gen_reg_rtx (mode);
7370      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7371      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7372
7373      /* First try to do it with a special MIN or MAX instruction.
7374	 If that does not win, use a conditional jump to select the proper
7375	 value.  */
7376      this_optab = (TREE_UNSIGNED (type)
7377		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7378		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7379
7380      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7381			   OPTAB_WIDEN);
7382      if (temp != 0)
7383	return temp;
7384
7385      /* At this point, a MEM target is no longer useful; we will get better
7386	 code without it.  */
7387
7388      if (GET_CODE (target) == MEM)
7389	target = gen_reg_rtx (mode);
7390
7391      if (target != op0)
7392	emit_move_insn (target, op0);
7393
7394      op0 = gen_label_rtx ();
7395
7396      /* If this mode is an integer too wide to compare properly,
7397	 compare word by word.  Rely on cse to optimize constant cases.  */
7398      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7399	{
7400	  if (code == MAX_EXPR)
7401	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7402					  target, op1, NULL_RTX, op0);
7403	  else
7404	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7405					  op1, target, NULL_RTX, op0);
7406	  emit_move_insn (target, op1);
7407	}
7408      else
7409	{
7410	  if (code == MAX_EXPR)
7411	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7412		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7413		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7414	  else
7415	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7416		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7417		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7418	  if (temp == const0_rtx)
7419	    emit_move_insn (target, op1);
7420	  else if (temp != const_true_rtx)
7421	    {
7422	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7423		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7424	      else
7425		abort ();
7426	      emit_move_insn (target, op1);
7427	    }
7428	}
7429      emit_label (op0);
7430      return target;
7431
7432    case BIT_NOT_EXPR:
7433      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7434      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7435      if (temp == 0)
7436	abort ();
7437      return temp;
7438
7439    case FFS_EXPR:
7440      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7441      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7442      if (temp == 0)
7443	abort ();
7444      return temp;
7445
7446      /* ??? Can optimize bitwise operations with one arg constant.
7447	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7448	 and (a bitwise1 b) bitwise2 b (etc)
7449	 but that is probably not worthwhile.  */
7450
7451      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7452	 boolean values when we want in all cases to compute both of them.  In
7453	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7454	 as actual zero-or-1 values and then bitwise anding.  In cases where
7455	 there cannot be any side effects, better code would be made by
7456	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7457	 how to recognize those cases.  */
7458
7459    case TRUTH_AND_EXPR:
7460    case BIT_AND_EXPR:
7461      this_optab = and_optab;
7462      goto binop;
7463
7464    case TRUTH_OR_EXPR:
7465    case BIT_IOR_EXPR:
7466      this_optab = ior_optab;
7467      goto binop;
7468
7469    case TRUTH_XOR_EXPR:
7470    case BIT_XOR_EXPR:
7471      this_optab = xor_optab;
7472      goto binop;
7473
7474    case LSHIFT_EXPR:
7475    case RSHIFT_EXPR:
7476    case LROTATE_EXPR:
7477    case RROTATE_EXPR:
7478      preexpand_calls (exp);
7479      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7480	subtarget = 0;
7481      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7482      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7483			   unsignedp);
7484
7485      /* Could determine the answer when only additive constants differ.  Also,
7486	 the addition of one can be handled by changing the condition.  */
7487    case LT_EXPR:
7488    case LE_EXPR:
7489    case GT_EXPR:
7490    case GE_EXPR:
7491    case EQ_EXPR:
7492    case NE_EXPR:
7493      preexpand_calls (exp);
7494      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7495      if (temp != 0)
7496	return temp;
7497
7498      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
7499      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7500	  && original_target
7501	  && GET_CODE (original_target) == REG
7502	  && (GET_MODE (original_target)
7503	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7504	{
7505	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7506			      VOIDmode, 0);
7507
7508	  if (temp != original_target)
7509	    temp = copy_to_reg (temp);
7510
7511	  op1 = gen_label_rtx ();
7512	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7513				   GET_MODE (temp), unsignedp, 0, op1);
7514	  emit_move_insn (temp, const1_rtx);
7515	  emit_label (op1);
7516	  return temp;
7517	}
7518
7519      /* If no set-flag instruction, must generate a conditional
7520	 store into a temporary variable.  Drop through
7521	 and handle this like && and ||.  */
7522
7523    case TRUTH_ANDIF_EXPR:
7524    case TRUTH_ORIF_EXPR:
7525      if (! ignore
7526	  && (target == 0 || ! safe_from_p (target, exp, 1)
7527	      /* Make sure we don't have a hard reg (such as function's return
7528		 value) live across basic blocks, if not optimizing.  */
7529	      || (!optimize && GET_CODE (target) == REG
7530		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7531	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7532
7533      if (target)
7534	emit_clr_insn (target);
7535
7536      op1 = gen_label_rtx ();
7537      jumpifnot (exp, op1);
7538
7539      if (target)
7540	emit_0_to_1_insn (target);
7541
7542      emit_label (op1);
7543      return ignore ? const0_rtx : target;
7544
7545    case TRUTH_NOT_EXPR:
7546      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7547      /* The parser is careful to generate TRUTH_NOT_EXPR
7548	 only with operands that are always zero or one.  */
7549      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7550			   target, 1, OPTAB_LIB_WIDEN);
7551      if (temp == 0)
7552	abort ();
7553      return temp;
7554
7555    case COMPOUND_EXPR:
7556      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7557      emit_queue ();
7558      return expand_expr (TREE_OPERAND (exp, 1),
7559			  (ignore ? const0_rtx : target),
7560			  VOIDmode, 0);
7561
7562    case COND_EXPR:
7563      /* If we would have a "singleton" (see below) were it not for a
7564	 conversion in each arm, bring that conversion back out.  */
7565      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7566	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7567	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7568	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7569	{
7570	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7571	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7572
7573	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7574	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7575	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7576		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7577	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7578		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7579	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7580		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7581	    return expand_expr (build1 (NOP_EXPR, type,
7582					build (COND_EXPR, TREE_TYPE (true),
7583					       TREE_OPERAND (exp, 0),
7584					       true, false)),
7585				target, tmode, modifier);
7586	}
7587
7588      {
7589	/* Note that COND_EXPRs whose type is a structure or union
7590	   are required to be constructed to contain assignments of
7591	   a temporary variable, so that we can evaluate them here
7592	   for side effect only.  If type is void, we must do likewise.  */
7593
7594	/* If an arm of the branch requires a cleanup,
7595	   only that cleanup is performed.  */
7596
7597	tree singleton = 0;
7598	tree binary_op = 0, unary_op = 0;
7599
7600	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7601	   convert it to our mode, if necessary.  */
7602	if (integer_onep (TREE_OPERAND (exp, 1))
7603	    && integer_zerop (TREE_OPERAND (exp, 2))
7604	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7605	  {
7606	    if (ignore)
7607	      {
7608		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7609			     ro_modifier);
7610		return const0_rtx;
7611	      }
7612
7613	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7614	    if (GET_MODE (op0) == mode)
7615	      return op0;
7616
7617	    if (target == 0)
7618	      target = gen_reg_rtx (mode);
7619	    convert_move (target, op0, unsignedp);
7620	    return target;
7621	  }
7622
7623	/* Check for X ? A + B : A.  If we have this, we can copy A to the
7624	   output and conditionally add B.  Similarly for unary operations.
7625	   Don't do this if X has side-effects because those side effects
7626	   might affect A or B and the "?" operation is a sequence point in
7627	   ANSI.  (operand_equal_p tests for side effects.)  */
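	/* Illustrative note (added): e.g. X ? A + B : A stores A into
	   the target unconditionally and then adds B only on the path
	   where X is true, avoiding a separate copy of A in each arm.  */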
7628
7629	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7630	    && operand_equal_p (TREE_OPERAND (exp, 2),
7631				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7632	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7633	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7634		 && operand_equal_p (TREE_OPERAND (exp, 1),
7635				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7636	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7637	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7638		 && operand_equal_p (TREE_OPERAND (exp, 2),
7639				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7640	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7641	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7642		 && operand_equal_p (TREE_OPERAND (exp, 1),
7643				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7644	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7645
7646	/* If we are not to produce a result, we have no target.  Otherwise,
7647	   if a target was specified use it; it will not be used as an
7648	   intermediate target unless it is safe.  If no target, use a
7649	   temporary.  */
7650
7651	if (ignore)
7652	  temp = 0;
7653	else if (original_target
7654		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7655		     || (singleton && GET_CODE (original_target) == REG
7656			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7657			 && original_target == var_rtx (singleton)))
7658		 && GET_MODE (original_target) == mode
7659#ifdef HAVE_conditional_move
7660		 && (! can_conditionally_move_p (mode)
7661		     || GET_CODE (original_target) == REG
7662		     || TREE_ADDRESSABLE (type))
7663#endif
7664		 && ! (GET_CODE (original_target) == MEM
7665		       && MEM_VOLATILE_P (original_target)))
7666	  temp = original_target;
7667	else if (TREE_ADDRESSABLE (type))
7668	  abort ();
7669	else
7670	  temp = assign_temp (type, 0, 0, 1);
7671
7672	/* If we had X ? A + C : A, with C a constant power of 2, and we can
7673	   do the test of X as a store-flag operation, do this as
7674	   A + ((X != 0) << log C).  Similarly for other simple binary
7675	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
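	/* Worked example (added): X ? A + 4 : A becomes
	   A + ((X != 0) << 2); do_store_flag materializes X != 0 as a
	   0-or-1 value, the shift scales it to 0 or 4, and the final
	   add needs no branch at all.  */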
7676	if (temp && singleton && binary_op
7677	    && (TREE_CODE (binary_op) == PLUS_EXPR
7678		|| TREE_CODE (binary_op) == MINUS_EXPR
7679		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
7680		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
7681	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7682		: integer_onep (TREE_OPERAND (binary_op, 1)))
7683	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7684	  {
7685	    rtx result;
7686	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7687			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7688			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7689			    : xor_optab);
7690
7691	    /* If we had X ? A : A + 1, do this as A + (X == 0).
7692
7693	       We have to invert the truth value here and then put it
7694	       back later if do_store_flag fails.  We cannot simply copy
7695	       TREE_OPERAND (exp, 0) to another variable and modify that
7696	       because invert_truthvalue can modify the tree pointed to
7697	       by its argument.  */
7698	    if (singleton == TREE_OPERAND (exp, 1))
7699	      TREE_OPERAND (exp, 0)
7700		= invert_truthvalue (TREE_OPERAND (exp, 0));
7701
7702	    result = do_store_flag (TREE_OPERAND (exp, 0),
7703				    (safe_from_p (temp, singleton, 1)
7704				     ? temp : NULL_RTX),
7705				    mode, BRANCH_COST <= 1);
7706
7707	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7708	      result = expand_shift (LSHIFT_EXPR, mode, result,
7709				     build_int_2 (tree_log2
7710						  (TREE_OPERAND
7711						   (binary_op, 1)),
7712						  0),
7713				     (safe_from_p (temp, singleton, 1)
7714				      ? temp : NULL_RTX), 0);
7715
7716	    if (result)
7717	      {
7718		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7719		return expand_binop (mode, boptab, op1, result, temp,
7720				     unsignedp, OPTAB_LIB_WIDEN);
7721	      }
7722	    else if (singleton == TREE_OPERAND (exp, 1))
7723	      TREE_OPERAND (exp, 0)
7724		= invert_truthvalue (TREE_OPERAND (exp, 0));
7725	  }
7726
7727	do_pending_stack_adjust ();
7728	NO_DEFER_POP;
7729	op0 = gen_label_rtx ();
7730
7731	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7732	  {
7733	    if (temp != 0)
7734	      {
7735		/* If the target conflicts with the other operand of the
7736		   binary op, we can't use it.  Also, we can't use the target
7737		   if it is a hard register, because evaluating the condition
7738		   might clobber it.  */
7739		if ((binary_op
7740		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7741		    || (GET_CODE (temp) == REG
7742			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
7743		  temp = gen_reg_rtx (mode);
7744		store_expr (singleton, temp, 0);
7745	      }
7746	    else
7747	      expand_expr (singleton,
7748			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7749	    if (singleton == TREE_OPERAND (exp, 1))
7750	      jumpif (TREE_OPERAND (exp, 0), op0);
7751	    else
7752	      jumpifnot (TREE_OPERAND (exp, 0), op0);
7753
7754	    start_cleanup_deferral ();
7755	    if (binary_op && temp == 0)
7756	      /* Just touch the other operand.  */
7757	      expand_expr (TREE_OPERAND (binary_op, 1),
7758			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7759	    else if (binary_op)
7760	      store_expr (build (TREE_CODE (binary_op), type,
7761				 make_tree (type, temp),
7762				 TREE_OPERAND (binary_op, 1)),
7763			  temp, 0);
7764	    else
7765	      store_expr (build1 (TREE_CODE (unary_op), type,
7766				  make_tree (type, temp)),
7767			  temp, 0);
7768	    op1 = op0;
7769	  }
7770	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7771	   comparison operator.  If we have one of these cases, set the
7772	   output to A, branch on A (cse will merge these two references),
7773	   then set the output to FOO.  */
7774	else if (temp
7775		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7776		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7777		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7778				     TREE_OPERAND (exp, 1), 0)
7779		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7780		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7781		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7782	  {
7783	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7784	      temp = gen_reg_rtx (mode);
7785	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7786	    jumpif (TREE_OPERAND (exp, 0), op0);
7787
7788	    start_cleanup_deferral ();
7789	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7790	    op1 = op0;
7791	  }
7792	else if (temp
7793		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7794		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7795		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7796				     TREE_OPERAND (exp, 2), 0)
7797		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7798		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7799		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7800	  {
7801	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7802	      temp = gen_reg_rtx (mode);
7803	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7804	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7805
7806	    start_cleanup_deferral ();
7807	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7808	    op1 = op0;
7809	  }
7810	else
7811	  {
7812	    op1 = gen_label_rtx ();
7813	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7814
7815	    start_cleanup_deferral ();
7816	    if (temp != 0)
7817	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
7818	    else
7819	      expand_expr (TREE_OPERAND (exp, 1),
7820			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7821	    end_cleanup_deferral ();
7822	    emit_queue ();
7823	    emit_jump_insn (gen_jump (op1));
7824	    emit_barrier ();
7825	    emit_label (op0);
7826	    start_cleanup_deferral ();
7827	    if (temp != 0)
7828	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
7829	    else
7830	      expand_expr (TREE_OPERAND (exp, 2),
7831			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7832	  }
7833
7834	end_cleanup_deferral ();
7835
7836	emit_queue ();
7837	emit_label (op1);
7838	OK_DEFER_POP;
7839
7840	return temp;
7841      }
7842
7843    case TARGET_EXPR:
7844      {
7845	/* Something needs to be initialized, but we didn't know
7846	   where that thing was when building the tree.  For example,
7847	   it could be the return value of a function, or a parameter
7848	   to a function which is laid out on the stack, or a temporary
7849	   variable which must be passed by reference.
7850
7851	   We guarantee that the expression will either be constructed
7852	   or copied into our original target.  */
7853
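	/* Illustration (an assumption about typical front-end input, not
	   something verified here): a C++ declaration such as

	       struct S f ();
	       struct S s = f ();

	   can arrive as a TARGET_EXPR whose slot is S, so that the call
	   constructs its result directly in S's storage.  */
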
7854	tree slot = TREE_OPERAND (exp, 0);
7855	tree cleanups = NULL_TREE;
7856	tree exp1;
7857
7858	if (TREE_CODE (slot) != VAR_DECL)
7859	  abort ();
7860
7861	if (! ignore)
7862	  target = original_target;
7863
7864	if (target == 0)
7865	  {
7866	    if (DECL_RTL (slot) != 0)
7867	      {
7868		target = DECL_RTL (slot);
7869		/* If we have already expanded the slot, don't do
7870		   it again.  (mrs)  */
7871		if (TREE_OPERAND (exp, 1) == NULL_TREE)
7872		  return target;
7873	      }
7874	    else
7875	      {
7876		target = assign_temp (type, 2, 0, 1);
7877		/* All temp slots at this level must not conflict.  */
7878		preserve_temp_slots (target);
7879		DECL_RTL (slot) = target;
7880		if (TREE_ADDRESSABLE (slot))
7881		  {
7882		    TREE_ADDRESSABLE (slot) = 0;
7883		    mark_addressable (slot);
7884		  }
7885
7886		/* Since SLOT is not known to the called function
7887		   to belong to its stack frame, we must build an explicit
7888		   cleanup.  This case occurs when we must build up a reference
7889		   to pass the reference as an argument.  In this case,
7890		   it is very likely that such a reference need not be
7891		   built here.  */
7892
7893		if (TREE_OPERAND (exp, 2) == 0)
7894		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7895		cleanups = TREE_OPERAND (exp, 2);
7896	      }
7897	  }
7898	else
7899	  {
7900	    /* This case does occur when expanding a parameter which
7901	       needs to be constructed on the stack.  The target
7902	       is the actual stack address that we want to initialize.
7903	       The function we call will perform the cleanup in this case.  */
7904
7905	    /* If we have already assigned it space, use that space,
7906	       not the target that we were passed, as our target
7907	       parameter is only a hint.  */
7908	    if (DECL_RTL (slot) != 0)
7909              {
7910                target = DECL_RTL (slot);
7911                /* If we have already expanded the slot, don't do
7912                   it again.  (mrs)  */
7913                if (TREE_OPERAND (exp, 1) == NULL_TREE)
7914                  return target;
7915	      }
7916	    else
7917	      {
7918		DECL_RTL (slot) = target;
7919		/* If we must have an addressable slot, then make sure that
7920		   the RTL that we just stored in slot is OK.  */
7921		if (TREE_ADDRESSABLE (slot))
7922		  {
7923		    TREE_ADDRESSABLE (slot) = 0;
7924		    mark_addressable (slot);
7925		  }
7926	      }
7927	  }
7928
7929	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7930	/* Mark it as expanded.  */
7931	TREE_OPERAND (exp, 1) = NULL_TREE;
7932
7933	TREE_USED (slot) = 1;
7934	store_expr (exp1, target, 0);
7935
7936	expand_decl_cleanup (NULL_TREE, cleanups);
7937
7938	return target;
7939      }
7940
7941    case INIT_EXPR:
7942      {
7943	tree lhs = TREE_OPERAND (exp, 0);
7944	tree rhs = TREE_OPERAND (exp, 1);
7945	tree noncopied_parts = 0;
7946	tree lhs_type = TREE_TYPE (lhs);
7947
7948	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7949	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7950	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7951						  TYPE_NONCOPIED_PARTS (lhs_type));
7952	while (noncopied_parts != 0)
7953	  {
7954	    expand_assignment (TREE_VALUE (noncopied_parts),
7955			       TREE_PURPOSE (noncopied_parts), 0, 0);
7956	    noncopied_parts = TREE_CHAIN (noncopied_parts);
7957	  }
7958	return temp;
7959      }
7960
7961    case MODIFY_EXPR:
7962      {
7963	/* If lhs is complex, expand calls in rhs before computing it.
7964	   That's so we don't compute a pointer and save it over a call.
7965	   If lhs is simple, compute it first so we can give it as a
7966	   target if the rhs is just a call.  This avoids an extra temp and copy
7967	   and prevents a partial subsumption that makes bad code.
7968	   Actually we could treat component_ref's of vars like vars.  */
7969
7970	tree lhs = TREE_OPERAND (exp, 0);
7971	tree rhs = TREE_OPERAND (exp, 1);
7972	tree noncopied_parts = 0;
7973	tree lhs_type = TREE_TYPE (lhs);
7974
7975	temp = 0;
7976
7977	if (TREE_CODE (lhs) != VAR_DECL
7978	    && TREE_CODE (lhs) != RESULT_DECL
7979	    && TREE_CODE (lhs) != PARM_DECL
7980	    && ! (TREE_CODE (lhs) == INDIRECT_REF
7981		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7982	  preexpand_calls (exp);
7983
7984	/* Check for |= or &= of a bitfield of size one into another bitfield
7985	   of size 1.  In this case, (unless we need the result of the
7986	   assignment) we can do this more efficiently with a
7987	   test followed by an assignment, if necessary.
7988
7989	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
7990	   things change so we do, this code should be enhanced to
7991	   support it.  */
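	/* Illustration (not from this file): a fragment that reaches this
	   path, assuming two one-bit fields:

	       struct s { unsigned int a : 1, b : 1; };
	       void f (struct s *p) { p->a |= p->b; }

	   We branch on P->B, skipping the store when the assignment would
	   be a no-op and storing the constant (1 for |=, 0 for &=)
	   otherwise.  */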
7992	if (ignore
7993	    && TREE_CODE (lhs) == COMPONENT_REF
7994	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
7995		|| TREE_CODE (rhs) == BIT_AND_EXPR)
7996	    && TREE_OPERAND (rhs, 0) == lhs
7997	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7998	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7999	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8000	  {
8001	    rtx label = gen_label_rtx ();
8002
8003	    do_jump (TREE_OPERAND (rhs, 1),
8004		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8005		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8006	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8007					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8008					      ? integer_one_node
8009					      : integer_zero_node)),
8010			       0, 0);
8011	    do_pending_stack_adjust ();
8012	    emit_label (label);
8013	    return const0_rtx;
8014	  }
8015
8016	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8017	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8018	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8019						  TYPE_NONCOPIED_PARTS (lhs_type));
8020
8021	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8022	while (noncopied_parts != 0)
8023	  {
8024	    expand_assignment (TREE_PURPOSE (noncopied_parts),
8025			       TREE_VALUE (noncopied_parts), 0, 0);
8026	    noncopied_parts = TREE_CHAIN (noncopied_parts);
8027	  }
8028	return temp;
8029      }
8030
8031    case RETURN_EXPR:
8032      if (!TREE_OPERAND (exp, 0))
8033	expand_null_return ();
8034      else
8035	expand_return (TREE_OPERAND (exp, 0));
8036      return const0_rtx;
8037
8038    case PREINCREMENT_EXPR:
8039    case PREDECREMENT_EXPR:
8040      return expand_increment (exp, 0, ignore);
8041
8042    case POSTINCREMENT_EXPR:
8043    case POSTDECREMENT_EXPR:
8044      /* Faster to treat as pre-increment if result is not used.  */
8045      return expand_increment (exp, ! ignore, ignore);
8046
8047    case ADDR_EXPR:
8048      /* If nonzero, TEMP will be set to the address of something that might
8049	 be a MEM corresponding to a stack slot.  */
8050      temp = 0;
8051
8052      /* Are we taking the address of a nested function?  */
8053      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8054	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8055	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8056	  && ! TREE_STATIC (exp))
8057	{
8058	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8059	  op0 = force_operand (op0, target);
8060	}
8061      /* If we are taking the address of something erroneous, just
8062	 return a zero.  */
8063      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8064	return const0_rtx;
8065      else
8066	{
8067	  /* We make sure to pass const0_rtx down if we came in with
8068	     ignore set, to avoid doing the cleanups twice.  */
8069	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8070			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8071			     (modifier == EXPAND_INITIALIZER
8072			      ? modifier : EXPAND_CONST_ADDRESS));
8073
8074	  /* If we are going to ignore the result, OP0 will have been set
8075	     to const0_rtx, so just return it.  Don't get confused and
8076	     think we are taking the address of the constant.  */
8077	  if (ignore)
8078	    return op0;
8079
8080	  op0 = protect_from_queue (op0, 0);
8081
8082	  /* We would like the object in memory.  If it is a constant,
8083	     we can have it be statically allocated into memory.  For
8084	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
8085	     memory and store the value into it.  */
8086
8087	  if (CONSTANT_P (op0))
8088	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8089				   op0);
8090	  else if (GET_CODE (op0) == MEM)
8091	    {
8092	      mark_temp_addr_taken (op0);
8093	      temp = XEXP (op0, 0);
8094	    }
8095
8096	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8097		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8098	    {
8099	      /* If this object is in a register, it must not
8100		 be BLKmode.  */
8101	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8102	      rtx memloc = assign_temp (inner_type, 1, 1, 1);
8103
8104	      mark_temp_addr_taken (memloc);
8105	      emit_move_insn (memloc, op0);
8106	      op0 = memloc;
8107	    }
8108
8109	  if (GET_CODE (op0) != MEM)
8110	    abort ();
8111
8112	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8113	    {
8114	      temp = XEXP (op0, 0);
8115#ifdef POINTERS_EXTEND_UNSIGNED
8116	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8117		  && mode == ptr_mode)
8118		temp = convert_memory_address (ptr_mode, temp);
8119#endif
8120	      return temp;
8121	    }
8122
8123	  op0 = force_operand (XEXP (op0, 0), target);
8124	}
8125
8126      if (flag_force_addr && GET_CODE (op0) != REG)
8127	op0 = force_reg (Pmode, op0);
8128
8129      if (GET_CODE (op0) == REG
8130	  && ! REG_USERVAR_P (op0))
8131	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8132
8133      /* If we might have had a temp slot, add an equivalent address
8134	 for it.  */
8135      if (temp != 0)
8136	update_temp_slot_address (temp, op0);
8137
8138#ifdef POINTERS_EXTEND_UNSIGNED
8139      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8140	  && mode == ptr_mode)
8141	op0 = convert_memory_address (ptr_mode, op0);
8142#endif
8143
8144      return op0;
8145
8146    case ENTRY_VALUE_EXPR:
8147      abort ();
8148
8149    /* COMPLEX type for Extended Pascal & Fortran.  */
8150    case COMPLEX_EXPR:
8151      {
8152	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8153	rtx insns;
8154
8155	/* Get the rtx for each operand.  */
8156	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8157	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8158
8159	if (! target)
8160	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8161
8162	start_sequence ();
8163
8164	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8165	emit_move_insn (gen_realpart (mode, target), op0);
8166	emit_move_insn (gen_imagpart (mode, target), op1);
8167
8168	insns = get_insns ();
8169	end_sequence ();
8170
8171	/* Complex construction should appear as a single unit.  */
8172	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8173	   each with a separate pseudo as destination.
8174	   It's not correct for flow to treat them as a unit.  */
8175	if (GET_CODE (target) != CONCAT)
8176	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8177	else
8178	  emit_insns (insns);
8179
8180	return target;
8181      }
8182
8183    case REALPART_EXPR:
8184      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8185      return gen_realpart (mode, op0);
8186
8187    case IMAGPART_EXPR:
8188      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8189      return gen_imagpart (mode, op0);
8190
8191    case CONJ_EXPR:
8192      {
8193	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8194	rtx imag_t;
8195	rtx insns;
8196
8197	op0  = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8198
8199	if (! target)
8200	  target = gen_reg_rtx (mode);
8201
8202	start_sequence ();
8203
8204	/* Store the realpart and the negated imagpart to target.  */
8205	emit_move_insn (gen_realpart (partmode, target),
8206			gen_realpart (partmode, op0));
8207
8208	imag_t = gen_imagpart (partmode, target);
8209	temp = expand_unop (partmode, neg_optab,
8210			       gen_imagpart (partmode, op0), imag_t, 0);
8211	if (temp != imag_t)
8212	  emit_move_insn (imag_t, temp);
8213
8214	insns = get_insns ();
8215	end_sequence ();
8216
8217	/* Conjugate should appear as a single unit.
8218	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8219	   each with a separate pseudo as destination.
8220	   It's not correct for flow to treat them as a unit.  */
8221	if (GET_CODE (target) != CONCAT)
8222	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8223	else
8224	  emit_insns (insns);
8225
8226	return target;
8227      }
8228
8229    case TRY_CATCH_EXPR:
8230      {
8231	tree handler = TREE_OPERAND (exp, 1);
8232
8233	expand_eh_region_start ();
8234
8235	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8236
8237	expand_eh_region_end (handler);
8238
8239	return op0;
8240      }
8241
8242    case TRY_FINALLY_EXPR:
8243      {
8244	tree try_block = TREE_OPERAND (exp, 0);
8245	tree finally_block = TREE_OPERAND (exp, 1);
8246	rtx finally_label = gen_label_rtx ();
8247	rtx done_label = gen_label_rtx ();
8248	rtx return_link = gen_reg_rtx (Pmode);
8249	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8250			      (tree) finally_label, (tree) return_link);
8251	TREE_SIDE_EFFECTS (cleanup) = 1;
8252
8253	/* Start a new binding layer that will keep track of all cleanup
8254	   actions to be performed.  */
8255	expand_start_bindings (0);
8256
8257	target_temp_slot_level = temp_slot_level;
8258
8259	expand_decl_cleanup (NULL_TREE, cleanup);
8260	op0 = expand_expr (try_block, target, tmode, modifier);
8261
8262	preserve_temp_slots (op0);
8263	expand_end_bindings (NULL_TREE, 0, 0);
8264	emit_jump (done_label);
8265	emit_label (finally_label);
8266	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8267	emit_indirect_jump (return_link);
8268	emit_label (done_label);
8269	return op0;
8270      }
8271
8272    case GOTO_SUBROUTINE_EXPR:
8273      {
8274	rtx subr = (rtx) TREE_OPERAND (exp, 0);
8275	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8276	rtx return_address = gen_label_rtx ();
8277	emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8278	emit_jump (subr);
8279	emit_label (return_address);
8280	return const0_rtx;
8281      }
8282
8283    case POPDCC_EXPR:
8284      {
8285	rtx dcc = get_dynamic_cleanup_chain ();
8286	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8287	return const0_rtx;
8288      }
8289
8290    case POPDHC_EXPR:
8291      {
8292	rtx dhc = get_dynamic_handler_chain ();
8293	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8294	return const0_rtx;
8295      }
8296
8297    default:
8298      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8299    }
8300
8301  /* Here to do an ordinary binary operator, generating an instruction
8302     from the optab already placed in `this_optab'.  */
8303 binop:
8304  preexpand_calls (exp);
8305  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8306    subtarget = 0;
8307  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8308  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8309 binop2:
8310  temp = expand_binop (mode, this_optab, op0, op1, target,
8311		       unsignedp, OPTAB_LIB_WIDEN);
8312  if (temp == 0)
8313    abort ();
8314  return temp;
8315}
8316
8317
8318
8319/* Return the alignment in bits of EXP, a pointer valued expression.
8320   But don't return more than MAX_ALIGN no matter what.
8321   The alignment returned is, by default, the alignment of the thing that
8322   EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8323
8324   Otherwise, look at the expression to see if we can do better, i.e., if the
8325   expression is actually pointing at an object whose alignment is tighter.  */
8326
8327static int
8328get_pointer_alignment (exp, max_align)
8329     tree exp;
8330     unsigned max_align;
8331{
8332  unsigned align, inner;
8333
8334  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8335    return 0;
8336
8337  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8338  align = MIN (align, max_align);
8339
8340  while (1)
8341    {
8342      switch (TREE_CODE (exp))
8343	{
8344	case NOP_EXPR:
8345	case CONVERT_EXPR:
8346	case NON_LVALUE_EXPR:
8347	  exp = TREE_OPERAND (exp, 0);
8348	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8349	    return align;
8350	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8351	  align = MIN (inner, max_align);
8352	  break;
8353
8354	case PLUS_EXPR:
8355	  /* If sum of pointer + int, restrict our maximum alignment to that
8356	     imposed by the integer.  If not, we can't do any better than
8357	     ALIGN.  */
8358	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8359	    return align;
8360
8361	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8362		  & (max_align - 1))
8363		 != 0)
8364	    max_align >>= 1;
8365
8366	  exp = TREE_OPERAND (exp, 0);
8367	  break;
8368
8369	case ADDR_EXPR:
8370	  /* See what we are pointing at and look at its alignment.  */
8371	  exp = TREE_OPERAND (exp, 0);
8372	  if (TREE_CODE (exp) == FUNCTION_DECL)
8373	    align = FUNCTION_BOUNDARY;
8374	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8375	    align = DECL_ALIGN (exp);
8376#ifdef CONSTANT_ALIGNMENT
8377	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8378	    align = CONSTANT_ALIGNMENT (exp, align);
8379#endif
8380	  return MIN (align, max_align);
8381
8382	default:
8383	  return align;
8384	}
8385    }
8386}
8387
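/* Worked example for get_pointer_alignment (illustrative; assumes
   MAX_ALIGN is 64 bits on entry): for the argument (char *) p + 4, the
   PLUS_EXPR case sees the constant 4, i.e. 32 bits; 32 & 63 is nonzero,
   so max_align halves to 32, then 32 & 31 is zero and the loop stops,
   capping any later refinement at 32-bit alignment.  */
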
8388/* Return the tree node and offset if a given argument corresponds to
8389   a string constant.  */
8390
8391static tree
8392string_constant (arg, ptr_offset)
8393     tree arg;
8394     tree *ptr_offset;
8395{
8396  STRIP_NOPS (arg);
8397
8398  if (TREE_CODE (arg) == ADDR_EXPR
8399      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8400    {
8401      *ptr_offset = integer_zero_node;
8402      return TREE_OPERAND (arg, 0);
8403    }
8404  else if (TREE_CODE (arg) == PLUS_EXPR)
8405    {
8406      tree arg0 = TREE_OPERAND (arg, 0);
8407      tree arg1 = TREE_OPERAND (arg, 1);
8408
8409      STRIP_NOPS (arg0);
8410      STRIP_NOPS (arg1);
8411
8412      if (TREE_CODE (arg0) == ADDR_EXPR
8413	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8414	{
8415	  *ptr_offset = arg1;
8416	  return TREE_OPERAND (arg0, 0);
8417	}
8418      else if (TREE_CODE (arg1) == ADDR_EXPR
8419	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8420	{
8421	  *ptr_offset = arg0;
8422	  return TREE_OPERAND (arg1, 0);
8423	}
8424    }
8425
8426  return 0;
8427}
8428
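/* Illustration (not from this file): given the argument tree for
   "hello" + 2, string_constant returns the STRING_CST for "hello" and
   sets *PTR_OFFSET to the INTEGER_CST 2.  */
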
8429/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
8430   way, because it could contain a zero byte in the middle.
8431   TREE_STRING_LENGTH is the size of the character array, not the string.
8432
8433   Unfortunately, string_constant can't access the values of const char
8434   arrays with initializers, so neither can we do so here.  */
8435
8436static tree
8437c_strlen (src)
8438     tree src;
8439{
8440  tree offset_node;
8441  int offset, max;
8442  char *ptr;
8443
8444  src = string_constant (src, &offset_node);
8445  if (src == 0)
8446    return 0;
8447  max = TREE_STRING_LENGTH (src);
8448  ptr = TREE_STRING_POINTER (src);
8449  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8450    {
8451      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8452	 compute the offset to the following null if we don't know where to
8453	 start searching for it.  */
8454      int i;
8455      for (i = 0; i < max; i++)
8456	if (ptr[i] == 0)
8457	  return 0;
8458      /* We don't know the starting offset, but we do know that the string
8459	 has no internal zero bytes.  We can assume that the offset falls
8460	 within the bounds of the string; otherwise, the programmer deserves
8461	 what he gets.  Subtract the offset from the length of the string,
8462	 and return that.  */
8463      /* This would perhaps not be valid if we were dealing with named
8464         arrays in addition to literal string constants.  */
8465      return size_binop (MINUS_EXPR, size_int (max), offset_node);
8466    }
8467
8468  /* We have a known offset into the string.  Start searching there for
8469     a null character.  */
8470  if (offset_node == 0)
8471    offset = 0;
8472  else
8473    {
8474      /* Did we get a long long offset?  If so, punt.  */
8475      if (TREE_INT_CST_HIGH (offset_node) != 0)
8476	return 0;
8477      offset = TREE_INT_CST_LOW (offset_node);
8478    }
8479  /* If the offset is known to be out of bounds, warn, and call strlen at
8480     runtime.  */
8481  if (offset < 0 || offset > max)
8482    {
8483      warning ("offset outside bounds of constant string");
8484      return 0;
8485    }
8486  /* Use strlen to search for the first zero byte.  Since any strings
8487     constructed with build_string will have nulls appended, we win even
8488     if we get handed something like (char[4])"abcd".
8489
8490     Since OFFSET is our starting index into the string, no further
8491     calculation is needed.  */
8492  return size_int (strlen (ptr + offset));
8493}
8494
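/* Illustration (not from this file): c_strlen folds only the constant
   cases.  strlen ("hello" + 2) becomes size_int (3), while strlen of
   "foo\0bar" at a non-constant offset yields 0 (not foldable), because
   the embedded zero byte makes the result depend on where the search
   starts.  */
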
8495rtx
8496expand_builtin_return_addr (fndecl_code, count, tem)
8497     enum built_in_function fndecl_code;
8498     int count;
8499     rtx tem;
8500{
8501  int i;
8502
8503  /* Some machines need special handling before we can access
8504     arbitrary frames.  For example, on the sparc, we must first flush
8505     all register windows to the stack.  */
8506#ifdef SETUP_FRAME_ADDRESSES
8507  if (count > 0)
8508    SETUP_FRAME_ADDRESSES ();
8509#endif
8510
8511  /* On the sparc, the return address is not in the frame, it is in a
8512     register.  There is no way to access it off of the current frame
8513     pointer, but it can be accessed off the previous frame pointer by
8514     reading the value from the register window save area.  */
8515#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8516  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8517    count--;
8518#endif
8519
8520  /* Scan back COUNT frames to the specified frame.  */
8521  for (i = 0; i < count; i++)
8522    {
8523      /* Assume the dynamic chain pointer is in the word that the
8524	 frame address points to, unless otherwise specified.  */
8525#ifdef DYNAMIC_CHAIN_ADDRESS
8526      tem = DYNAMIC_CHAIN_ADDRESS (tem);
8527#endif
8528      tem = memory_address (Pmode, tem);
8529      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8530    }
8531
8532  /* For __builtin_frame_address, return what we've got.  */
8533  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8534    return tem;
8535
8536  /* For __builtin_return_address, get the return address from that
8537     frame.  */
8538#ifdef RETURN_ADDR_RTX
8539  tem = RETURN_ADDR_RTX (count, tem);
8540#else
8541  tem = memory_address (Pmode,
8542			plus_constant (tem, GET_MODE_SIZE (Pmode)));
8543  tem = gen_rtx_MEM (Pmode, tem);
8544#endif
8545  return tem;
8546}
8547
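/* Usage sketch (illustrative) for the builtins served above; with
   COUNT == 0 the frame-walking loop is skipped entirely:

       void *my_return_address (void)
       {
	 return __builtin_return_address (0);
       }
   */
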
8548/* Construct the leading half of a __builtin_setjmp call.  Control will
8549   return to RECEIVER_LABEL.  This is used directly by sjlj exception
8550   handling code.  */
8551
8552void
8553expand_builtin_setjmp_setup (buf_addr, receiver_label)
8554     rtx buf_addr;
8555     rtx receiver_label;
8556{
8557  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8558  rtx stack_save;
8559
8560#ifdef POINTERS_EXTEND_UNSIGNED
8561  buf_addr = convert_memory_address (Pmode, buf_addr);
8562#endif
8563
8564  buf_addr = force_reg (Pmode, buf_addr);
8565
8566  emit_queue ();
8567
8568  /* We store the frame pointer and the address of receiver_label in
8569     the buffer and use the rest of it for the stack save area, which
8570     is machine-dependent.  */
8571
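  /* Concretely, in Pmode-word units, matching the stores below:
     word 0 holds the frame pointer value (BUILTIN_SETJMP_FRAME_VALUE),
     word 1 the address of RECEIVER_LABEL, and word 2 onward the stack
     save area, in SA_MODE.  */
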
8572#ifndef BUILTIN_SETJMP_FRAME_VALUE
8573#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8574#endif
8575
8576  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8577		  BUILTIN_SETJMP_FRAME_VALUE);
8578  emit_move_insn (validize_mem
8579		  (gen_rtx_MEM (Pmode,
8580				plus_constant (buf_addr,
8581					       GET_MODE_SIZE (Pmode)))),
8582		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
8583
8584  stack_save = gen_rtx_MEM (sa_mode,
8585			    plus_constant (buf_addr,
8586					   2 * GET_MODE_SIZE (Pmode)));
8587  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8588
8589  /* If there is further processing to do, do it.  */
8590#ifdef HAVE_builtin_setjmp_setup
8591  if (HAVE_builtin_setjmp_setup)
8592    emit_insn (gen_builtin_setjmp_setup (buf_addr));
8593#endif
8594
8595  /* Tell optimize_save_area_alloca that extra work will need to
8596     be done during alloca.  */
8597  current_function_calls_setjmp = 1;
8598
8599  /* Set this so all the registers get saved in our frame; we need to be
8600     able to copy the saved values for any registers from frames we unwind. */
8601  current_function_has_nonlocal_label = 1;
8602}
8603
8604/* Construct the trailing part of a __builtin_setjmp call.
8605   This is used directly by sjlj exception handling code.  */
8606
8607void
8608expand_builtin_setjmp_receiver (receiver_label)
8609      rtx receiver_label ATTRIBUTE_UNUSED;
8610{
8611  /* The FP is clobbered when we get here, so we have to make sure
8612     it's marked as used by this function.  */
8613  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8614
8615  /* Mark the static chain as clobbered here so life information
8616     doesn't get messed up for it.  */
8617  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8618
8619  /* Now put in the code to restore the frame pointer, and argument
8620     pointer, if needed.  The code below is from expand_end_bindings
8621     in stmt.c; see detailed documentation there.  */
8622#ifdef HAVE_nonlocal_goto
8623  if (! HAVE_nonlocal_goto)
8624#endif
8625    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8626
8627#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8628  if (fixed_regs[ARG_POINTER_REGNUM])
8629    {
8630#ifdef ELIMINABLE_REGS
8631      size_t i;
8632      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8633
8634      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8635	if (elim_regs[i].from == ARG_POINTER_REGNUM
8636	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8637	  break;
8638
8639      if (i == sizeof elim_regs / sizeof elim_regs [0])
8640#endif
8641	{
8642	  /* Now restore our arg pointer from the address at which it
8643	     was saved in our stack frame.
8644	     If there hasn't been space allocated for it yet, make
8645	     some now.  */
8646	  if (arg_pointer_save_area == 0)
8647	    arg_pointer_save_area
8648	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8649	  emit_move_insn (virtual_incoming_args_rtx,
8650			  copy_to_reg (arg_pointer_save_area));
8651	}
8652    }
8653#endif
8654
8655#ifdef HAVE_builtin_setjmp_receiver
8656  if (HAVE_builtin_setjmp_receiver)
8657    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
8658  else
8659#endif
8660#ifdef HAVE_nonlocal_goto_receiver
8661    if (HAVE_nonlocal_goto_receiver)
8662      emit_insn (gen_nonlocal_goto_receiver ());
8663    else
8664#endif
8665      {
8666	; /* Nothing */
8667      }
8668
8669  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
8670     insn, but we must not allow the code we just generated to be reordered
8671     by scheduling.  Specifically, the update of the frame pointer must
8672     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
8673     insn.  */
8674  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
8675}
8676
8677
8678/* __builtin_setjmp is passed a pointer to an array of five words (not
8679   all will be used on all machines).  It operates similarly to the C
8680   library function of the same name, but is more efficient.  Much of
8681   the code below (and for longjmp) is copied from the handling of
8682   non-local gotos.
8683
8684   NOTE: This is intended for use by GNAT and the exception handling
8685   scheme in the compiler and will only work in the method used by
8686   them.  */
8687
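/* Usage sketch (illustrative): the buffer is five words, and the only
   value that may be passed back is 1, which expand_builtin_longjmp
   below enforces:

       void *buf[5];

       int f (void)
       {
	 if (__builtin_setjmp (buf))
	   return 1;
	 __builtin_longjmp (buf, 1);
       }

   The longjmp transfers to the setjmp, which then returns nonzero.  */
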
8688static rtx
8689expand_builtin_setjmp (arglist, target)
8690     tree arglist;
8691     rtx target;
8692{
8693  rtx buf_addr, next_lab, cont_lab;
8694
8695  if (arglist == 0
8696      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8697    return NULL_RTX;
8698
8699  if (target == 0 || GET_CODE (target) != REG
8700      || REGNO (target) < FIRST_PSEUDO_REGISTER)
8701    target = gen_reg_rtx (TYPE_MODE (integer_type_node));
8702
8703  buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8704
8705  next_lab = gen_label_rtx ();
8706  cont_lab = gen_label_rtx ();
8707
8708  expand_builtin_setjmp_setup (buf_addr, next_lab);
8709
8710  /* Set TARGET to zero and branch to the continue label.  */
8711  emit_move_insn (target, const0_rtx);
8712  emit_jump_insn (gen_jump (cont_lab));
8713  emit_barrier ();
8714  emit_label (next_lab);
8715
8716  expand_builtin_setjmp_receiver (next_lab);
8717
8718  /* Set TARGET to one.  */
8719  emit_move_insn (target, const1_rtx);
8720  emit_label (cont_lab);
8721
8722  /* Tell flow about the strange goings on.  Putting `next_lab' on
8723     `nonlocal_goto_handler_labels' indicates that function
8724     calls may traverse the arc back to this label.  */
8725
8726  current_function_has_nonlocal_label = 1;
8727  nonlocal_goto_handler_labels
8728    = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
8729
8730  return target;
8731}
8732
8733void
8734expand_builtin_longjmp (buf_addr, value)
8735     rtx buf_addr, value;
8736{
8737  rtx fp, lab, stack;
8738  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8739
8740#ifdef POINTERS_EXTEND_UNSIGNED
8741  buf_addr = convert_memory_address (Pmode, buf_addr);
8742#endif
8743  buf_addr = force_reg (Pmode, buf_addr);
8744
8745  /* We used to store value in static_chain_rtx, but that fails if pointers
8746     are smaller than integers.  We instead require that the user must pass
8747     a second argument of 1, because that is what builtin_setjmp will
8748     return.  This also makes EH slightly more efficient, since we are no
8749     longer copying around a value that we don't care about.  */
8750  if (value != const1_rtx)
8751    abort ();
8752
8753#ifdef HAVE_builtin_longjmp
8754  if (HAVE_builtin_longjmp)
8755    emit_insn (gen_builtin_longjmp (buf_addr));
8756  else
8757#endif
8758    {
8759      fp = gen_rtx_MEM (Pmode, buf_addr);
8760      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8761					       GET_MODE_SIZE (Pmode)));
8762
8763      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8764						   2 * GET_MODE_SIZE (Pmode)));
8765
8766      /* Pick up FP, label, and SP from the block and jump.  This code is
8767	 from expand_goto in stmt.c; see there for detailed comments.  */
8768#if HAVE_nonlocal_goto
8769      if (HAVE_nonlocal_goto)
8770	/* We have to pass a value to the nonlocal_goto pattern that will
8771	   get copied into the static_chain pointer, but it does not matter
8772	   what that value is, because builtin_setjmp does not use it.  */
8773	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8774      else
8775#endif
8776	{
8777	  lab = copy_to_reg (lab);
8778
8779	  emit_move_insn (hard_frame_pointer_rtx, fp);
8780	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8781
8782	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8783	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8784	  emit_indirect_jump (lab);
8785	}
8786    }
8787}
8788
8789static rtx
8790get_memory_rtx (exp)
8791     tree exp;
8792{
8793  rtx mem;
8794  int is_aggregate;
8795
8796  mem = gen_rtx_MEM (BLKmode,
8797		     memory_address (BLKmode,
8798				     expand_expr (exp, NULL_RTX,
8799						  ptr_mode, EXPAND_SUM)));
8800
8801  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8802
8803  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
8804     if the value is the address of a structure or if the expression is
8805     cast to a pointer to structure type.  */
8806  is_aggregate = 0;
8807
8808  while (TREE_CODE (exp) == NOP_EXPR)
8809    {
8810      tree cast_type = TREE_TYPE (exp);
8811      if (TREE_CODE (cast_type) == POINTER_TYPE
8812	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8813	{
8814	  is_aggregate = 1;
8815	  break;
8816	}
8817      exp = TREE_OPERAND (exp, 0);
8818    }
8819
8820  if (is_aggregate == 0)
8821    {
8822      tree type;
8823
8824      if (TREE_CODE (exp) == ADDR_EXPR)
8825	/* If this is the address of an object, check whether the
8826	   object is an array.  */
8827	type = TREE_TYPE (TREE_OPERAND (exp, 0));
8828      else
8829	type = TREE_TYPE (TREE_TYPE (exp));
8830      is_aggregate = AGGREGATE_TYPE_P (type);
8831    }
8832
8833  MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8834  return mem;
8835}
8836
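/* Illustration (not from this file): for a call argument written as
   (struct pt *) s, where S is a plain char *, the NOP_EXPR walk above
   notices the cast to pointer-to-aggregate and sets MEM_IN_STRUCT_P on
   the returned BLKmode MEM.  */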
8837
8838/* Expand an expression EXP that calls a built-in function,
8839   with result going to TARGET if that's convenient
8840   (and in mode MODE if that's convenient).
8841   SUBTARGET may be used as the target for computing one of EXP's operands.
8842   IGNORE is nonzero if the value is to be ignored.  */
8843
8844#define CALLED_AS_BUILT_IN(NODE) \
8845   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8846
8847static rtx
8848expand_builtin (exp, target, subtarget, mode, ignore)
8849     tree exp;
8850     rtx target;
8851     rtx subtarget;
8852     enum machine_mode mode;
8853     int ignore;
8854{
8855  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8856  tree arglist = TREE_OPERAND (exp, 1);
8857  rtx op0;
8858  rtx lab1, insns;
8859  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8860  optab builtin_optab;
8861
8862  switch (DECL_FUNCTION_CODE (fndecl))
8863    {
8864    case BUILT_IN_ABS:
8865    case BUILT_IN_LABS:
8866    case BUILT_IN_FABS:
8867      /* build_function_call changes these into ABS_EXPR.  */
8868      abort ();
8869
8870    case BUILT_IN_SIN:
8871    case BUILT_IN_COS:
8872      /* Treat these like sqrt, but only if the user asks for them.  */
8873      if (! flag_fast_math)
8874	break;
8875    case BUILT_IN_FSQRT:
8876      /* If not optimizing, call the library function.  */
8877      if (! optimize)
8878	break;
8879
8880      if (arglist == 0
8881	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
8882	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8883	break;
8884
8885      /* Stabilize and compute the argument.  */
8886      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8887	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8888	{
8889	  exp = copy_node (exp);
8890	  arglist = copy_node (arglist);
8891	  TREE_OPERAND (exp, 1) = arglist;
8892	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8893	}
8894      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8895
8896      /* Make a suitable register to place result in.  */
8897      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8898
8899      emit_queue ();
8900      start_sequence ();
8901
8902      switch (DECL_FUNCTION_CODE (fndecl))
8903	{
8904	case BUILT_IN_SIN:
8905	  builtin_optab = sin_optab; break;
8906	case BUILT_IN_COS:
8907	  builtin_optab = cos_optab; break;
8908	case BUILT_IN_FSQRT:
8909	  builtin_optab = sqrt_optab; break;
8910	default:
8911	  abort ();
8912	}
8913
8914      /* Compute into TARGET.
8915	 Set TARGET to wherever the result comes back.  */
8916      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8917			    builtin_optab, op0, target, 0);
8918
8919      /* If we were unable to expand via the builtin, stop the
8920	 sequence (without outputting the insns) and break, causing
8921	 a call to the library function.  */
8922      if (target == 0)
8923	{
8924	  end_sequence ();
8925	  break;
8926        }
8927
8928      /* Check the results by default.  But if flag_fast_math is turned on,
8929	 then assume sqrt will always be called with valid arguments.  */
8930
8931      if (flag_errno_math && ! flag_fast_math)
8932	{
8933	  /* Don't define the builtin FP instructions
8934	     if your machine is not IEEE.  */
8935	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8936	    abort ();
8937
8938	  lab1 = gen_label_rtx ();
8939
8940	  /* Test the result; if it is NaN, set errno=EDOM because
8941	     the argument was not in the domain.  */
8942	  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8943				   0, 0, lab1);
8944
8945#ifdef TARGET_EDOM
8946	  {
8947#ifdef GEN_ERRNO_RTX
8948	    rtx errno_rtx = GEN_ERRNO_RTX;
8949#else
8950	    rtx errno_rtx
8951	      = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8952#endif
8953
8954	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8955	  }
8956#else
8957	  /* We can't set errno=EDOM directly; let the library call do it.
8958	     Pop the arguments right away in case the call gets deleted.  */
8959	  NO_DEFER_POP;
8960	  expand_call (exp, target, 0);
8961	  OK_DEFER_POP;
8962#endif
8963
8964	  emit_label (lab1);
8965	}
8966
8967      /* Output the entire sequence.  */
8968      insns = get_insns ();
8969      end_sequence ();
8970      emit_insns (insns);
8971
8972      return target;
8973
8974    case BUILT_IN_FMOD:
8975      break;
8976
8977      /* __builtin_apply_args returns a block of memory allocated on
8978	 the stack into which are stored the arg pointer, structure
8979	 value address, static chain, and all the registers that might
8980	 possibly be used in performing a function call.  The code is
8981	 moved to the start of the function so the incoming values are
8982	 saved.  */
8983    case BUILT_IN_APPLY_ARGS:
8984      /* Don't do __builtin_apply_args more than once in a function.
8985	 Save the result of the first call and reuse it.  */
8986      if (apply_args_value != 0)
8987	return apply_args_value;
8988      {
8989	/* When this function is called, it means that registers must be
8990	   saved on entry to this function.  So we migrate the
8991	   call to the first insn of this function.  */
8992	rtx temp;
8993	rtx seq;
8994
8995	start_sequence ();
8996	temp = expand_builtin_apply_args ();
8997	seq = get_insns ();
8998	end_sequence ();
8999
9000	apply_args_value = temp;
9001
9002	/* Put the sequence after the NOTE that starts the function.
9003	   If this is inside a SEQUENCE, make the outer-level insn
9004	   chain current, so the code is placed at the start of the
9005	   function.  */
9006	push_topmost_sequence ();
9007	emit_insns_before (seq, NEXT_INSN (get_insns ()));
9008	pop_topmost_sequence ();
9009	return temp;
9010      }
9011
9012      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9013	 FUNCTION with a copy of the parameters described by
9014	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
9015	 allocated on the stack into which are stored all the registers
9016	 that might possibly be used for returning the result of a
9017	 function.  ARGUMENTS is the value returned by
9018	 __builtin_apply_args.  ARGSIZE is the number of bytes of
9019	 arguments that must be copied.  ??? How should this value be
9020	 computed?  We'll also need a safe worst case value for varargs
9021	 functions.  */
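      /* Usage sketch (illustrative): forwarding every argument to
	 another function and returning its result.  TARGET_FN and the
	 guessed argument-block size of 64 bytes are assumptions made
	 for the example only:

	     int target_fn ();

	     int wrapper ()
	     {
	       void *args = __builtin_apply_args ();
	       __builtin_return (__builtin_apply ((void (*) ()) target_fn,
						  args, 64));
	     }
	 */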
9022    case BUILT_IN_APPLY:
9023      if (arglist == 0
9024	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9025	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9026	  || TREE_CHAIN (arglist) == 0
9027	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9028	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9029	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9030	return const0_rtx;
9031      else
9032	{
9033	  int i;
9034	  tree t;
9035	  rtx ops[3];
9036
9037	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
9038	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
9039
9040	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
9041	}
9042
9043      /* __builtin_return (RESULT) causes the function to return the
9044	 value described by RESULT.  RESULT is address of the block of
9045	 memory returned by __builtin_apply.  */
9046    case BUILT_IN_RETURN:
9047      if (arglist
9048	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9049	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
9050	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
9051					    NULL_RTX, VOIDmode, 0));
9052      return const0_rtx;
9053
9054    case BUILT_IN_SAVEREGS:
9055      /* Don't do __builtin_saveregs more than once in a function.
9056	 Save the result of the first call and reuse it.  */
9057      if (saveregs_value != 0)
9058	return saveregs_value;
9059      {
9060	/* When this function is called, it means that registers must be
9061	   saved on entry to this function.  So we migrate the
9062	   call to the first insn of this function.  */
9063	rtx temp;
9064	rtx seq;
9065
9066	/* Now really call the function.  `expand_call' does not call
9067	   expand_builtin, so there is no danger of infinite recursion here.  */
9068	start_sequence ();
9069
9070#ifdef EXPAND_BUILTIN_SAVEREGS
9071	/* Do whatever the machine needs done in this case.  */
9072	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
9073#else
9074	/* The register where the function returns its value
9075	   is likely to have something else in it, such as an argument.
9076	   So preserve that register around the call.  */
9077
9078	if (value_mode != VOIDmode)
9079	  {
9080	    rtx valreg = hard_libcall_value (value_mode);
9081	    rtx saved_valreg = gen_reg_rtx (value_mode);
9082
9083	    emit_move_insn (saved_valreg, valreg);
9084	    temp = expand_call (exp, target, ignore);
9085	    emit_move_insn (valreg, saved_valreg);
9086	  }
9087	else
9088	  /* Generate the call, putting the value in a pseudo.  */
9089	  temp = expand_call (exp, target, ignore);
9090#endif
9091
9092	seq = get_insns ();
9093	end_sequence ();
9094
9095	saveregs_value = temp;
9096
9097	/* Put the sequence after the NOTE that starts the function.
9098	   If this is inside a SEQUENCE, make the outer-level insn
9099	   chain current, so the code is placed at the start of the
9100	   function.  */
9101	push_topmost_sequence ();
9102	emit_insns_before (seq, NEXT_INSN (get_insns ()));
9103	pop_topmost_sequence ();
9104	return temp;
9105      }
9106
9107      /* __builtin_args_info (N) returns word N of the arg space info
9108	 for the current function.  The number and meanings of words
9109	 are controlled by the definition of CUMULATIVE_ARGS.  */
9110    case BUILT_IN_ARGS_INFO:
9111      {
9112	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
9113	int *word_ptr = (int *) &current_function_args_info;
9114#if 0
9115	/* These are used by the code below that is #if 0'ed away.  */
9116	int i;
9117	tree type, elts, result;
9118#endif
9119
9120	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
9121	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
9122		 __FILE__, __LINE__);
9123
9124	if (arglist != 0)
9125	  {
9126	    tree arg = TREE_VALUE (arglist);
9127	    if (TREE_CODE (arg) != INTEGER_CST)
9128	      error ("argument of `__builtin_args_info' must be constant");
9129	    else
9130	      {
9131		int wordnum = TREE_INT_CST_LOW (arg);
9132
9133		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
9134		  error ("argument of `__builtin_args_info' out of range");
9135		else
9136		  return GEN_INT (word_ptr[wordnum]);
9137	      }
9138	  }
9139	else
9140	  error ("missing argument in `__builtin_args_info'");
9141
9142	return const0_rtx;
9143
9144#if 0
9145	for (elts = NULL_TREE, i = 0; i < nwords; i++)
9146	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
9147
9148	type = build_array_type (integer_type_node,
9149				 build_index_type (build_int_2 (nwords, 0)));
9150	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
9151	TREE_CONSTANT (result) = 1;
9152	TREE_STATIC (result) = 1;
9153	result = build (INDIRECT_REF, build_pointer_type (type), result);
9154	TREE_CONSTANT (result) = 1;
9155	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
9156#endif
9157      }
9158
9159      /* Return the address of the first anonymous stack arg.  */
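      /* Illustration (an assumption about typical <stdarg.h> contents,
	 not checked here): for "int f (int n, ...)", va_start expands
	 to a call like __builtin_next_arg (n), and the value computed
	 below is the address just past N, i.e. of the first anonymous
	 argument.  */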
9160    case BUILT_IN_NEXT_ARG:
9161      {
9162	tree fntype = TREE_TYPE (current_function_decl);
9163
9164	if ((TYPE_ARG_TYPES (fntype) == 0
9165	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9166		 == void_type_node))
9167	    && ! current_function_varargs)
9168	  {
9169	    error ("`va_start' used in function with fixed args");
9170	    return const0_rtx;
9171	  }
9172
9173	if (arglist)
9174	  {
9175	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9176	    tree arg = TREE_VALUE (arglist);
9177
9178	    /* Strip off all nops for the sake of the comparison.  This
9179	       is not quite the same as STRIP_NOPS.  It does more.
9180	       We must also strip off INDIRECT_REF for C++ reference
9181	       parameters.  */
9182	    while (TREE_CODE (arg) == NOP_EXPR
9183		   || TREE_CODE (arg) == CONVERT_EXPR
9184		   || TREE_CODE (arg) == NON_LVALUE_EXPR
9185		   || TREE_CODE (arg) == INDIRECT_REF)
9186	      arg = TREE_OPERAND (arg, 0);
9187	    if (arg != last_parm)
9188	      warning ("second parameter of `va_start' not last named argument");
9189	  }
9190	else if (! current_function_varargs)
9191	  /* Evidently an out of date version of <stdarg.h>; can't validate
9192	     va_start's second argument, but can still work as intended.  */
9193	  warning ("`__builtin_next_arg' called without an argument");
9194      }
9195
9196      return expand_binop (Pmode, add_optab,
9197			   current_function_internal_arg_pointer,
9198			   current_function_arg_offset_rtx,
9199			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
9200
9201    case BUILT_IN_CLASSIFY_TYPE:
9202      if (arglist != 0)
9203	{
9204	  tree type = TREE_TYPE (TREE_VALUE (arglist));
9205	  enum tree_code code = TREE_CODE (type);
9206	  if (code == VOID_TYPE)
9207	    return GEN_INT (void_type_class);
9208	  if (code == INTEGER_TYPE)
9209	    return GEN_INT (integer_type_class);
9210	  if (code == CHAR_TYPE)
9211	    return GEN_INT (char_type_class);
9212	  if (code == ENUMERAL_TYPE)
9213	    return GEN_INT (enumeral_type_class);
9214	  if (code == BOOLEAN_TYPE)
9215	    return GEN_INT (boolean_type_class);
9216	  if (code == POINTER_TYPE)
9217	    return GEN_INT (pointer_type_class);
9218	  if (code == REFERENCE_TYPE)
9219	    return GEN_INT (reference_type_class);
9220	  if (code == OFFSET_TYPE)
9221	    return GEN_INT (offset_type_class);
9222	  if (code == REAL_TYPE)
9223	    return GEN_INT (real_type_class);
9224	  if (code == COMPLEX_TYPE)
9225	    return GEN_INT (complex_type_class);
9226	  if (code == FUNCTION_TYPE)
9227	    return GEN_INT (function_type_class);
9228	  if (code == METHOD_TYPE)
9229	    return GEN_INT (method_type_class);
9230	  if (code == RECORD_TYPE)
9231	    return GEN_INT (record_type_class);
9232	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9233	    return GEN_INT (union_type_class);
9234	  if (code == ARRAY_TYPE)
9235	    {
9236	      if (TYPE_STRING_FLAG (type))
9237		return GEN_INT (string_type_class);
9238	      else
9239		return GEN_INT (array_type_class);
9240	    }
9241	  if (code == SET_TYPE)
9242	    return GEN_INT (set_type_class);
9243	  if (code == FILE_TYPE)
9244	    return GEN_INT (file_type_class);
9245	  if (code == LANG_TYPE)
9246	    return GEN_INT (lang_type_class);
9247	}
9248      return GEN_INT (no_type_class);
9249
9250    case BUILT_IN_CONSTANT_P:
9251      if (arglist == 0)
9252	return const0_rtx;
9253      else
9254	{
9255	  tree arg = TREE_VALUE (arglist);
9256	  rtx tmp;
9257
9258	  /* We return 1 for a numeric type that's known to be a constant
9259	     value at compile-time or for an aggregate type that's a
9260	     literal constant.  */
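	  /* Examples (illustrative): __builtin_constant_p (3) and
	     __builtin_constant_p ("abc") fold to 1 below, while
	     __builtin_constant_p (x) for an automatic int X expands to
	     a CONSTANT_P_RTX so CSE may still resolve it; for X of
	     pointer or aggregate type, anything short of a literal
	     folds to 0 at once.  */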
9261	  STRIP_NOPS (arg);
9262
9263	  /* If we know this is a constant, emit the constant of one.  */
9264	  if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9265	      || (TREE_CODE (arg) == CONSTRUCTOR
9266		  && TREE_CONSTANT (arg))
9267	      || (TREE_CODE (arg) == ADDR_EXPR
9268		  && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9269	    return const1_rtx;
9270
9271	  /* If we aren't going to be running CSE or this expression
9272	     has side effects, show we don't know it to be a constant.
9273	     Likewise if it's a pointer or aggregate type since in those
9274	     cases we only want literals, since those are only optimized
9275	     when generating RTL, not later.  */
9276	  if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9277	      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9278	      || POINTER_TYPE_P (TREE_TYPE (arg)))
9279	    return const0_rtx;
9280
9281	  /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9282	     chance to see if it can deduce whether ARG is constant.  */
9283
9284	  tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9285	  tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9286	  return tmp;
9287	}
9288
9289    case BUILT_IN_FRAME_ADDRESS:
9290      /* The argument must be a nonnegative integer constant.
9291	 It counts the number of frames to scan up the stack.
9292	 The value is the address of that frame.  */
9293    case BUILT_IN_RETURN_ADDRESS:
9294      /* The argument must be a nonnegative integer constant.
9295	 It counts the number of frames to scan up the stack.
9296	 The value is the return address saved in that frame.  */
9297      if (arglist == 0)
9298	/* Warning about missing arg was already issued.  */
9299	return const0_rtx;
9300      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9301	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9302	{
9303	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9304	    error ("invalid arg to `__builtin_frame_address'");
9305	  else
9306	    error ("invalid arg to `__builtin_return_address'");
9307	  return const0_rtx;
9308	}
9309      else
9310	{
9311	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9312						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9313						hard_frame_pointer_rtx);
9314
9315	  /* Some ports cannot access arbitrary stack frames.  */
9316	  if (tem == NULL)
9317	    {
9318	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9319		warning ("unsupported arg to `__builtin_frame_address'");
9320	      else
9321		warning ("unsupported arg to `__builtin_return_address'");
9322	      return const0_rtx;
9323	    }
9324
9325	  /* For __builtin_frame_address, return what we've got.  */
9326	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9327	    return tem;
9328
9329	  if (GET_CODE (tem) != REG
9330	      && ! CONSTANT_P (tem))
9331	    tem = copy_to_mode_reg (Pmode, tem);
9332	  return tem;
9333	}
9334
9335    /* Returns the address of the area where the structure is returned.
9336       0 otherwise.  */
9337    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9338      if (arglist != 0
9339          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9340          || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9341        return const0_rtx;
9342      else
9343        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9344
9345    case BUILT_IN_ALLOCA:
9346      if (arglist == 0
9347	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
9348	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9349	break;
9350
9351      /* Compute the argument.  */
9352      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9353
9354      /* Allocate the desired space.  */
9355      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9356
9357    case BUILT_IN_FFS:
9358      /* If not optimizing, call the library function.  */
9359      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9360	break;
9361
9362      if (arglist == 0
9363	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
9364	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9365	break;
9366
9367      /* Compute the argument.  */
9368      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9369      /* Compute ffs, into TARGET if possible.
9370	 Set TARGET to wherever the result comes back.  */
9371      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9372			    ffs_optab, op0, target, 1);
9373      if (target == 0)
9374	abort ();
9375      return target;
9376
9377    case BUILT_IN_STRLEN:
9378      /* If not optimizing, call the library function.  */
9379      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9380	break;
9381
9382      if (arglist == 0
9383	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9384	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9385	break;
9386      else
9387	{
9388	  tree src = TREE_VALUE (arglist);
9389	  tree len = c_strlen (src);
9390
9391	  int align
9392	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9393
9394	  rtx result, src_rtx, char_rtx;
9395	  enum machine_mode insn_mode = value_mode, char_mode;
9396	  enum insn_code icode;
9397
9398	  /* If the length is known, just return it.  */
9399	  if (len != 0)
9400	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9401
9402	  /* If SRC is not a pointer type, don't do this operation inline.  */
9403	  if (align == 0)
9404	    break;
9405
9406	  /* Call a function if we can't compute strlen in the right mode.  */
9407
9408	  while (insn_mode != VOIDmode)
9409	    {
9410	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9411	      if (icode != CODE_FOR_nothing)
9412		break;
9413
9414	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9415	    }
9416	  if (insn_mode == VOIDmode)
9417	    break;
9418
9419	  /* Make a place to write the result of the instruction.  */
9420	  result = target;
9421	  if (! (result != 0
9422		 && GET_CODE (result) == REG
9423		 && GET_MODE (result) == insn_mode
9424		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9425	    result = gen_reg_rtx (insn_mode);
9426
9427	  /* Make sure the operands are acceptable to the predicates.  */
9428
9429	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9430	    result = gen_reg_rtx (insn_mode);
9431	  src_rtx = memory_address (BLKmode,
9432				    expand_expr (src, NULL_RTX, ptr_mode,
9433						 EXPAND_NORMAL));
9434
9435	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9436	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9437
9438	  /* Check the string is readable and has an end.  */
9439	  if (current_function_check_memory_usage)
9440	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9441			       src_rtx, Pmode,
9442			       GEN_INT (MEMORY_USE_RO),
9443			       TYPE_MODE (integer_type_node));
9444
9445	  char_rtx = const0_rtx;
9446	  char_mode = insn_operand_mode[(int)icode][2];
9447	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9448	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9449
9450	  emit_insn (GEN_FCN (icode) (result,
9451				      gen_rtx_MEM (BLKmode, src_rtx),
9452				      char_rtx, GEN_INT (align)));
9453
9454	  /* Return the value in the proper mode for this function.  */
9455	  if (GET_MODE (result) == value_mode)
9456	    return result;
9457	  else if (target != 0)
9458	    {
9459	      convert_move (target, result, 0);
9460	      return target;
9461	    }
9462	  else
9463	    return convert_to_mode (value_mode, result, 0);
9464	}
9465
9466    case BUILT_IN_STRCPY:
9467      /* If not optimizing, call the library function.  */
9468      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9469	break;
9470
9471      if (arglist == 0
9472	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9473	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9474	  || TREE_CHAIN (arglist) == 0
9475	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9476	break;
9477      else
9478	{
9479	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9480
9481	  if (len == 0)
9482	    break;
9483
9484	  len = size_binop (PLUS_EXPR, len, integer_one_node);
9485
9486	  chainon (arglist, build_tree_list (NULL_TREE, len));
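	  /* Sketch of the effect: strcpy (dst, "abc") now carries the
	     extra length argument 4 (strlen plus 1 for the terminating
	     NUL) and is expanded by the BUILT_IN_MEMCPY code below.  */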
9487	}
9488
      /* Falls through into the BUILT_IN_MEMCPY code below.  */
9490    case BUILT_IN_MEMCPY:
9491      /* If not optimizing, call the library function.  */
9492      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9493	break;
9494
9495      if (arglist == 0
9496	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9497	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9498	  || TREE_CHAIN (arglist) == 0
9499	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9500	      != POINTER_TYPE)
9501	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9502	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
9503				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
9504	      != INTEGER_TYPE))
9505	break;
9506      else
9507	{
9508	  tree dest = TREE_VALUE (arglist);
9509	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
9510	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9511
9512	  int src_align
9513	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9514	  int dest_align
9515	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9516	  rtx dest_mem, src_mem, dest_addr, len_rtx;
9517
9518	  /* If either SRC or DEST is not a pointer type, don't do
9519	     this operation in-line.  */
9520	  if (src_align == 0 || dest_align == 0)
9521	    {
9522	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9523		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9524	      break;
9525	    }
9526
9527	  dest_mem = get_memory_rtx (dest);
9528	  src_mem = get_memory_rtx (src);
9529	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9530
9531	  /* Just copy the rights of SRC to the rights of DEST.  */
9532	  if (current_function_check_memory_usage)
9533	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9534			       XEXP (dest_mem, 0), Pmode,
9535			       XEXP (src_mem, 0), Pmode,
9536			       len_rtx, TYPE_MODE (sizetype));
9537
9538	  /* Copy word part most expediently.  */
9539	  dest_addr
9540	    = emit_block_move (dest_mem, src_mem, len_rtx,
9541			       MIN (src_align, dest_align));
9542
9543	  if (dest_addr == 0)
9544	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9545
9546	  return dest_addr;
9547	}
9548
9549    case BUILT_IN_MEMSET:
9550      /* If not optimizing, call the library function.  */
9551      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9552	break;
9553
9554      if (arglist == 0
9555	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9556	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9557	  || TREE_CHAIN (arglist) == 0
9558	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9559	      != INTEGER_TYPE)
9560	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
	      != INTEGER_TYPE))
9565	break;
9566      else
9567	{
9568	  tree dest = TREE_VALUE (arglist);
9569	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
9570	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9571
9572	  int dest_align
9573	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9574	  rtx dest_mem, dest_addr, len_rtx;
9575
9576	  /* If DEST is not a pointer type, don't do this
9577	     operation in-line.  */
9578	  if (dest_align == 0)
9579	    break;
9580
9581	  /* If the arguments have side-effects, then we can only evaluate
9582	     them at most once.  The following code evaluates them twice if
9583	     they are not constants because we break out to expand_call
9584	     in that case.  They can't be constants if they have side-effects
9585	     so we can check for that first.  Alternatively, we could call
9586	     save_expr to make multiple evaluation safe.  */
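	  /* For example, with memset (p, c++, n), the expansion of c++
	     just below followed by a break out to expand_call would
	     apply the increment twice.  */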
9587	  if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9588	    break;
9589
	  /* If VAL is not 0, don't do this operation in-line.  */
9591	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9592	    break;
9593
9594	  /* If LEN does not expand to a constant, don't do this
9595	     operation in-line.  */
9596	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9597	  if (GET_CODE (len_rtx) != CONST_INT)
9598	    break;
9599
9600	  dest_mem = get_memory_rtx (dest);
9601
	  /* Just check that DEST is writable and mark it as readable.  */
9603	  if (current_function_check_memory_usage)
9604	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9605			       XEXP (dest_mem, 0), Pmode,
9606			       len_rtx, TYPE_MODE (sizetype),
9607			       GEN_INT (MEMORY_USE_WO),
9608			       TYPE_MODE (integer_type_node));
9609
9611	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9612
9613	  if (dest_addr == 0)
9614	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9615
9616	  return dest_addr;
9617	}
9618
9619/* These comparison functions need an instruction that returns an actual
9620   index.  An ordinary compare that just sets the condition codes
9621   is not enough.  */
9622#ifdef HAVE_cmpstrsi
9623    case BUILT_IN_STRCMP:
9624      /* If not optimizing, call the library function.  */
9625      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9626	break;
9627
9628      /* If we need to check memory accesses, call the library function.  */
9629      if (current_function_check_memory_usage)
9630	break;
9631
9632      if (arglist == 0
9633	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9634	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9635	  || TREE_CHAIN (arglist) == 0
9636	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9637	break;
9638      else if (!HAVE_cmpstrsi)
9639	break;
9640      {
9641	tree arg1 = TREE_VALUE (arglist);
9642	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9643	tree len, len2;
9644
9645	len = c_strlen (arg1);
9646	if (len)
9647	  len = size_binop (PLUS_EXPR, integer_one_node, len);
9648	len2 = c_strlen (arg2);
9649	if (len2)
9650	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9651
9652	/* If we don't have a constant length for the first, use the length
9653	   of the second, if we know it.  We don't require a constant for
9654	   this case; some cost analysis could be done if both are available
9655	   but neither is constant.  For now, assume they're equally cheap.
9656
9657	   If both strings have constant lengths, use the smaller.  This
9658	   could arise if optimization results in strcpy being called with
9659	   two fixed strings, or if the code was machine-generated.  We should
9660	   add some code to the `memcmp' handler below to deal with such
9661	   situations, someday.  */
9662	if (!len || TREE_CODE (len) != INTEGER_CST)
9663	  {
9664	    if (len2)
9665	      len = len2;
9666	    else if (len == 0)
9667	      break;
9668	  }
9669	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9670	  {
9671	    if (tree_int_cst_lt (len2, len))
9672	      len = len2;
9673	  }
9674
9675	chainon (arglist, build_tree_list (NULL_TREE, len));
9676      }
9677
      /* Falls through into the BUILT_IN_MEMCMP code below.  */
9679    case BUILT_IN_MEMCMP:
9680      /* If not optimizing, call the library function.  */
9681      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9682	break;
9683
9684      /* If we need to check memory accesses, call the library function.  */
9685      if (current_function_check_memory_usage)
9686	break;
9687
9688      if (arglist == 0
9689	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9690	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9691	  || TREE_CHAIN (arglist) == 0
9692	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9693	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9694	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9695	break;
9696      else if (!HAVE_cmpstrsi)
9697	break;
9698      {
9699	tree arg1 = TREE_VALUE (arglist);
9700	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9701	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9702	rtx result;
9703
9704	int arg1_align
9705	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9706	int arg2_align
9707	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9708	enum machine_mode insn_mode
9709	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9710
	/* If either argument is not a pointer type, call the function.  */
9712	if (arg1_align == 0 || arg2_align == 0)
9713	  {
9714	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9715	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9716	    break;
9717	  }
9718
9719	/* Make a place to write the result of the instruction.  */
9720	result = target;
9721	if (! (result != 0
9722	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9723	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9724	  result = gen_reg_rtx (insn_mode);
9725
9726	emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9727				 get_memory_rtx (arg2),
9728				 expand_expr (len, NULL_RTX, VOIDmode, 0),
9729				 GEN_INT (MIN (arg1_align, arg2_align))));
9730
9731	/* Return the value in the proper mode for this function.  */
9732	mode = TYPE_MODE (TREE_TYPE (exp));
9733	if (GET_MODE (result) == mode)
9734	  return result;
9735	else if (target != 0)
9736	  {
9737	    convert_move (target, result, 0);
9738	    return target;
9739	  }
9740	else
9741	  return convert_to_mode (mode, result, 0);
9742      }
9743#else
9744    case BUILT_IN_STRCMP:
9745    case BUILT_IN_MEMCMP:
9746      break;
9747#endif
9748
9749    case BUILT_IN_SETJMP:
9750      target = expand_builtin_setjmp (arglist, target);
9751      if (target)
9752	return target;
9753      break;
9754
9755      /* __builtin_longjmp is passed a pointer to an array of five words.
9756	 It's similar to the C library longjmp function but works with
9757	 __builtin_setjmp above.  */
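      /* Usage sketch:

	   void *buf[5];
	   if (__builtin_setjmp (buf))
	     ...   (reached via the __builtin_longjmp below)
	   else
	     __builtin_longjmp (buf, 1);

	 The second argument must be the literal constant 1; this is
	 enforced below.  */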
9758    case BUILT_IN_LONGJMP:
9759      if (arglist == 0 || TREE_CHAIN (arglist) == 0
9760	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9761	break;
9762      else
9763	{
9764	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9765				      VOIDmode, 0);
9766	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9767				   NULL_RTX, VOIDmode, 0);
9768
9769	  if (value != const1_rtx)
9770	    {
9771	      error ("__builtin_longjmp second argument must be 1");
9772	      return const0_rtx;
9773	    }
9774
9775	  expand_builtin_longjmp (buf_addr, value);
9776	  return const0_rtx;
9777	}
9778
9779    case BUILT_IN_TRAP:
9780#ifdef HAVE_trap
9781      if (HAVE_trap)
9782	emit_insn (gen_trap ());
9783      else
9784#endif
9785	error ("__builtin_trap not supported by this target");
9786      emit_barrier ();
9787      return const0_rtx;
9788
9789      /* Various hooks for the DWARF 2 __throw routine.  */
9790    case BUILT_IN_UNWIND_INIT:
9791      expand_builtin_unwind_init ();
9792      return const0_rtx;
9793    case BUILT_IN_DWARF_CFA:
9794      return virtual_cfa_rtx;
9795#ifdef DWARF2_UNWIND_INFO
9796    case BUILT_IN_DWARF_FP_REGNUM:
9797      return expand_builtin_dwarf_fp_regnum ();
9798    case BUILT_IN_DWARF_REG_SIZE:
9799      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9800#endif
9801    case BUILT_IN_FROB_RETURN_ADDR:
9802      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9803    case BUILT_IN_EXTRACT_RETURN_ADDR:
9804      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9805    case BUILT_IN_EH_RETURN:
9806      expand_builtin_eh_return (TREE_VALUE (arglist),
9807				TREE_VALUE (TREE_CHAIN (arglist)),
9808				TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9809      return const0_rtx;
9810
9811    default:			/* just do library call, if unknown builtin */
9812      error ("built-in function `%s' not currently supported",
9813	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9814    }
9815
9816  /* The switch statement above can drop through to cause the function
9817     to be called normally.  */
9818
9819  return expand_call (exp, target, ignore);
9820}
9821
9822/* Built-in functions to perform an untyped call and return.  */
9823
9824/* For each register that may be used for calling a function, this
9825   gives a mode used to copy the register's value.  VOIDmode indicates
9826   the register is not used for calling a function.  If the machine
9827   has register windows, this gives only the outbound registers.
9828   INCOMING_REGNO gives the corresponding inbound register.  */
9829static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9830
9831/* For each register that may be used for returning values, this gives
9832   a mode used to copy the register's value.  VOIDmode indicates the
9833   register is not used for returning values.  If the machine has
9834   register windows, this gives only the outbound registers.
9835   INCOMING_REGNO gives the corresponding inbound register.  */
9836static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9837
9838/* For each register that may be used for calling a function, this
9839   gives the offset of that register into the block returned by
9840   __builtin_apply_args.  0 indicates that the register is not
9841   used for calling a function.  */
9842static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9843
9844/* Return the offset of register REGNO into the block returned by
9845   __builtin_apply_args.  This is not declared static, since it is
9846   needed in objc-act.c.  */
9847
9848int
9849apply_args_register_offset (regno)
9850     int regno;
9851{
9852  apply_args_size ();
9853
  /* Arguments are always put in outgoing registers (in the argument
     block) when that makes sense.  */
9856#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
9858#endif
9859  return apply_args_reg_offset[regno];
9860}
9861
9862/* Return the size required for the block returned by __builtin_apply_args,
9863   and initialize apply_args_mode.  */
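/* Roughly, the layout of that block is

     [incoming arg pointer]
     [structure value address, when struct_value_rtx is set]
     [one suitably aligned save slot per argument register]

   though the slot modes and offsets are entirely target-dependent.  */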
9864
9865static int
9866apply_args_size ()
9867{
9868  static int size = -1;
9869  int align, regno;
9870  enum machine_mode mode;
9871
9872  /* The values computed by this function never change.  */
9873  if (size < 0)
9874    {
9875      /* The first value is the incoming arg-pointer.  */
9876      size = GET_MODE_SIZE (Pmode);
9877
9878      /* The second value is the structure value address unless this is
9879	 passed as an "invisible" first argument.  */
9880      if (struct_value_rtx)
9881	size += GET_MODE_SIZE (Pmode);
9882
9883      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9884	if (FUNCTION_ARG_REGNO_P (regno))
9885	  {
9886	    /* Search for the proper mode for copying this register's
9887	       value.  I'm not sure this is right, but it works so far.  */
9888	    enum machine_mode best_mode = VOIDmode;
9889
9890	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9891		 mode != VOIDmode;
9892		 mode = GET_MODE_WIDER_MODE (mode))
9893	      if (HARD_REGNO_MODE_OK (regno, mode)
9894		  && HARD_REGNO_NREGS (regno, mode) == 1)
9895		best_mode = mode;
9896
9897	    if (best_mode == VOIDmode)
9898	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9899		   mode != VOIDmode;
9900		   mode = GET_MODE_WIDER_MODE (mode))
9901		if (HARD_REGNO_MODE_OK (regno, mode)
9902		    && (mov_optab->handlers[(int) mode].insn_code
9903			!= CODE_FOR_nothing))
9904		  best_mode = mode;
9905
9906	    mode = best_mode;
9907	    if (mode == VOIDmode)
9908	      abort ();
9909
9910	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9911	    if (size % align != 0)
9912	      size = CEIL (size, align) * align;
9913	    apply_args_reg_offset[regno] = size;
9914	    size += GET_MODE_SIZE (mode);
9915	    apply_args_mode[regno] = mode;
9916	  }
9917	else
9918	  {
9919	    apply_args_mode[regno] = VOIDmode;
9920	    apply_args_reg_offset[regno] = 0;
9921	  }
9922    }
9923  return size;
9924}
9925
9926/* Return the size required for the block returned by __builtin_apply,
9927   and initialize apply_result_mode.  */
9928
9929static int
9930apply_result_size ()
9931{
9932  static int size = -1;
9933  int align, regno;
9934  enum machine_mode mode;
9935
9936  /* The values computed by this function never change.  */
9937  if (size < 0)
9938    {
9939      size = 0;
9940
9941      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9942	if (FUNCTION_VALUE_REGNO_P (regno))
9943	  {
9944	    /* Search for the proper mode for copying this register's
9945	       value.  I'm not sure this is right, but it works so far.  */
9946	    enum machine_mode best_mode = VOIDmode;
9947
9948	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9949		 mode != TImode;
9950		 mode = GET_MODE_WIDER_MODE (mode))
9951	      if (HARD_REGNO_MODE_OK (regno, mode))
9952		best_mode = mode;
9953
9954	    if (best_mode == VOIDmode)
9955	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9956		   mode != VOIDmode;
9957		   mode = GET_MODE_WIDER_MODE (mode))
9958		if (HARD_REGNO_MODE_OK (regno, mode)
9959		    && (mov_optab->handlers[(int) mode].insn_code
9960			!= CODE_FOR_nothing))
9961		  best_mode = mode;
9962
9963	    mode = best_mode;
9964	    if (mode == VOIDmode)
9965	      abort ();
9966
9967	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9968	    if (size % align != 0)
9969	      size = CEIL (size, align) * align;
9970	    size += GET_MODE_SIZE (mode);
9971	    apply_result_mode[regno] = mode;
9972	  }
9973	else
9974	  apply_result_mode[regno] = VOIDmode;
9975
9976      /* Allow targets that use untyped_call and untyped_return to override
9977	 the size so that machine-specific information can be stored here.  */
9978#ifdef APPLY_RESULT_SIZE
9979      size = APPLY_RESULT_SIZE;
9980#endif
9981    }
9982  return size;
9983}
9984
9985#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9986/* Create a vector describing the result block RESULT.  If SAVEP is true,
9987   the result block is used to save the values; otherwise it is used to
9988   restore the values.  */
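/* For instance, with SAVEP nonzero on a target whose only value register
   is r0, this builds (parallel [(set (mem:M ...) (reg:M r0))]); with
   SAVEP zero the source and destination of the set are swapped.  */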
9989
9990static rtx
9991result_vector (savep, result)
9992     int savep;
9993     rtx result;
9994{
9995  int regno, size, align, nelts;
9996  enum machine_mode mode;
9997  rtx reg, mem;
9998  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9999
10000  size = nelts = 0;
10001  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10002    if ((mode = apply_result_mode[regno]) != VOIDmode)
10003      {
10004	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10005	if (size % align != 0)
10006	  size = CEIL (size, align) * align;
10007	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
10008	mem = change_address (result, mode,
10009			      plus_constant (XEXP (result, 0), size));
10010	savevec[nelts++] = (savep
10011			    ? gen_rtx_SET (VOIDmode, mem, reg)
10012			    : gen_rtx_SET (VOIDmode, reg, mem));
10013	size += GET_MODE_SIZE (mode);
10014      }
10015  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
10016}
10017#endif /* HAVE_untyped_call or HAVE_untyped_return */
10018
10019/* Save the state required to perform an untyped call with the same
10020   arguments as were passed to the current function.  */
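/* This implements __builtin_apply_args, as in

     void *args = __builtin_apply_args ();

   where ARGS then points at the block described above: the incoming
   arg pointer, the structure value address, and the argument
   registers.  */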
10021
10022static rtx
10023expand_builtin_apply_args ()
10024{
10025  rtx registers;
10026  int size, align, regno;
10027  enum machine_mode mode;
10028
10029  /* Create a block where the arg-pointer, structure value address,
10030     and argument registers can be saved.  */
10031  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
10032
10033  /* Walk past the arg-pointer and structure value address.  */
10034  size = GET_MODE_SIZE (Pmode);
10035  if (struct_value_rtx)
10036    size += GET_MODE_SIZE (Pmode);
10037
10038  /* Save each register used in calling a function to the block.  */
10039  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10040    if ((mode = apply_args_mode[regno]) != VOIDmode)
10041      {
10042	rtx tem;
10043
10044	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10045	if (size % align != 0)
10046	  size = CEIL (size, align) * align;
10047
10048	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10049
10050#ifdef STACK_REGS
        /* For reg-stack.c's stack register housekeeping.
	   Compare with a similar piece of code in function.c.  */
10053
10054        emit_insn (gen_rtx_USE (mode, tem));
10055#endif
10056
10057	emit_move_insn (change_address (registers, mode,
10058					plus_constant (XEXP (registers, 0),
10059						       size)),
10060			tem);
10061	size += GET_MODE_SIZE (mode);
10062      }
10063
10064  /* Save the arg pointer to the block.  */
10065  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
10066		  copy_to_reg (virtual_incoming_args_rtx));
10067  size = GET_MODE_SIZE (Pmode);
10068
10069  /* Save the structure value address unless this is passed as an
10070     "invisible" first argument.  */
10071  if (struct_value_incoming_rtx)
10072    {
10073      emit_move_insn (change_address (registers, Pmode,
10074				      plus_constant (XEXP (registers, 0),
10075						     size)),
10076		      copy_to_reg (struct_value_incoming_rtx));
10077      size += GET_MODE_SIZE (Pmode);
10078    }
10079
10080  /* Return the address of the block.  */
10081  return copy_addr_to_reg (XEXP (registers, 0));
10082}
10083
10084/* Perform an untyped call and save the state required to perform an
10085   untyped return of whatever value was returned by the given function.  */
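/* This implements __builtin_apply.  The classic use forwards the
   current function's arguments on to another function, as in

     __builtin_return (__builtin_apply (f, __builtin_apply_args (), 64));

   where 64 is a caller-chosen bound on the size of the stack argument
   data (a sketch; the appropriate bound is target-dependent).  */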
10086
10087static rtx
10088expand_builtin_apply (function, arguments, argsize)
10089     rtx function, arguments, argsize;
10090{
10091  int size, align, regno;
10092  enum machine_mode mode;
10093  rtx incoming_args, result, reg, dest, call_insn;
10094  rtx old_stack_level = 0;
10095  rtx call_fusage = 0;
10096
10097  /* Create a block where the return registers can be saved.  */
10098  result = assign_stack_local (BLKmode, apply_result_size (), -1);
10099
10100  /* ??? The argsize value should be adjusted here.  */
10101
10102  /* Fetch the arg pointer from the ARGUMENTS block.  */
10103  incoming_args = gen_reg_rtx (Pmode);
10104  emit_move_insn (incoming_args,
10105		  gen_rtx_MEM (Pmode, arguments));
10106#ifndef STACK_GROWS_DOWNWARD
10107  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
10108				incoming_args, 0, OPTAB_LIB_WIDEN);
10109#endif
10110
10111  /* Perform postincrements before actually calling the function.  */
10112  emit_queue ();
10113
10114  /* Push a new argument block and copy the arguments.  */
10115  do_pending_stack_adjust ();
10116
  /* Save the stack with the nonlocal variant, if available.  */
10118#ifdef HAVE_save_stack_nonlocal
10119  if (HAVE_save_stack_nonlocal)
10120    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
10121  else
10122#endif
10123    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
10124
  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine-specific ways.  */
10130  dest = allocate_dynamic_stack_space (argsize, 0, 0);
10131  emit_block_move (gen_rtx_MEM (BLKmode, dest),
10132		   gen_rtx_MEM (BLKmode, incoming_args),
10133		   argsize,
10134		   PARM_BOUNDARY / BITS_PER_UNIT);
10135
10136  /* Refer to the argument block.  */
10137  apply_args_size ();
10138  arguments = gen_rtx_MEM (BLKmode, arguments);
10139
10140  /* Walk past the arg-pointer and structure value address.  */
10141  size = GET_MODE_SIZE (Pmode);
10142  if (struct_value_rtx)
10143    size += GET_MODE_SIZE (Pmode);
10144
10145  /* Restore each of the registers previously saved.  Make USE insns
10146     for each of these registers for use in making the call.  */
10147  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10148    if ((mode = apply_args_mode[regno]) != VOIDmode)
10149      {
10150	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10151	if (size % align != 0)
10152	  size = CEIL (size, align) * align;
10153	reg = gen_rtx_REG (mode, regno);
10154	emit_move_insn (reg,
10155			change_address (arguments, mode,
10156					plus_constant (XEXP (arguments, 0),
10157						       size)));
10158
10159	use_reg (&call_fusage, reg);
10160	size += GET_MODE_SIZE (mode);
10161      }
10162
10163  /* Restore the structure value address unless this is passed as an
10164     "invisible" first argument.  */
10165  size = GET_MODE_SIZE (Pmode);
10166  if (struct_value_rtx)
10167    {
10168      rtx value = gen_reg_rtx (Pmode);
10169      emit_move_insn (value,
10170		      change_address (arguments, Pmode,
10171				      plus_constant (XEXP (arguments, 0),
10172						     size)));
10173      emit_move_insn (struct_value_rtx, value);
10174      if (GET_CODE (struct_value_rtx) == REG)
10175	  use_reg (&call_fusage, struct_value_rtx);
10176      size += GET_MODE_SIZE (Pmode);
10177    }
10178
10179  /* All arguments and registers used for the call are set up by now!  */
10180  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10181
10182  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
10183     and we don't want to load it into a register as an optimization,
10184     because prepare_call_address already did it if it should be done.  */
10185  if (GET_CODE (function) != SYMBOL_REF)
10186    function = memory_address (FUNCTION_MODE, function);
10187
10188  /* Generate the actual call instruction and save the return value.  */
10189#ifdef HAVE_untyped_call
10190  if (HAVE_untyped_call)
10191    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10192				      result, result_vector (1, result)));
10193  else
10194#endif
10195#ifdef HAVE_call_value
10196  if (HAVE_call_value)
10197    {
10198      rtx valreg = 0;
10199
10200      /* Locate the unique return register.  It is not possible to
10201	 express a call that sets more than one return register using
10202	 call_value; use untyped_call for that.  In fact, untyped_call
10203	 only needs to save the return registers in the given block.  */
10204      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10205	if ((mode = apply_result_mode[regno]) != VOIDmode)
10206	  {
10207	    if (valreg)
10208	      abort (); /* HAVE_untyped_call required.  */
10209	    valreg = gen_rtx_REG (mode, regno);
10210	  }
10211
10212      emit_call_insn (gen_call_value (valreg,
10213				      gen_rtx_MEM (FUNCTION_MODE, function),
10214				      const0_rtx, NULL_RTX, const0_rtx));
10215
10216      emit_move_insn (change_address (result, GET_MODE (valreg),
10217				      XEXP (result, 0)),
10218		      valreg);
10219    }
10220  else
10221#endif
10222    abort ();
10223
10224  /* Find the CALL insn we just emitted.  */
10225  for (call_insn = get_last_insn ();
10226       call_insn && GET_CODE (call_insn) != CALL_INSN;
10227       call_insn = PREV_INSN (call_insn))
10228    ;
10229
10230  if (! call_insn)
10231    abort ();
10232
10233  /* Put the register usage information on the CALL.  If there is already
10234     some usage information, put ours at the end.  */
10235  if (CALL_INSN_FUNCTION_USAGE (call_insn))
10236    {
10237      rtx link;
10238
10239      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10240	   link = XEXP (link, 1))
10241	;
10242
10243      XEXP (link, 1) = call_fusage;
10244    }
10245  else
10246    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10247
10248  /* Restore the stack.  */
10249#ifdef HAVE_save_stack_nonlocal
10250  if (HAVE_save_stack_nonlocal)
10251    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10252  else
10253#endif
10254    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10255
10256  /* Return the address of the result block.  */
10257  return copy_addr_to_reg (XEXP (result, 0));
10258}
10259
10260/* Perform an untyped return.  */
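/* This implements __builtin_return (RESULT), where RESULT is the address
   of the block produced by __builtin_apply above.  */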
10261
10262static void
10263expand_builtin_return (result)
10264     rtx result;
10265{
10266  int size, align, regno;
10267  enum machine_mode mode;
10268  rtx reg;
10269  rtx call_fusage = 0;
10270
10271  apply_result_size ();
10272  result = gen_rtx_MEM (BLKmode, result);
10273
10274#ifdef HAVE_untyped_return
10275  if (HAVE_untyped_return)
10276    {
10277      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10278      emit_barrier ();
10279      return;
10280    }
10281#endif
10282
10283  /* Restore the return value and note that each value is used.  */
10284  size = 0;
10285  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10286    if ((mode = apply_result_mode[regno]) != VOIDmode)
10287      {
10288	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10289	if (size % align != 0)
10290	  size = CEIL (size, align) * align;
10291	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10292	emit_move_insn (reg,
10293			change_address (result, mode,
10294					plus_constant (XEXP (result, 0),
10295						       size)));
10296
10297	push_to_sequence (call_fusage);
10298	emit_insn (gen_rtx_USE (VOIDmode, reg));
10299	call_fusage = get_insns ();
10300	end_sequence ();
10301	size += GET_MODE_SIZE (mode);
10302      }
10303
10304  /* Put the USE insns before the return.  */
10305  emit_insns (call_fusage);
10306
  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
10309  expand_null_return ();
10310}
10311
10312/* Expand code for a post- or pre- increment or decrement
10313   and return the RTX for the result.
10314   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
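/* E.g. for `i++' (POST == 1) the rtx returned holds the old value of
   `i', while for `++i' it holds the incremented value itself.  */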
10315
10316static rtx
10317expand_increment (exp, post, ignore)
10318     register tree exp;
10319     int post, ignore;
10320{
10321  register rtx op0, op1;
10322  register rtx temp, value;
10323  register tree incremented = TREE_OPERAND (exp, 0);
10324  optab this_optab = add_optab;
10325  int icode;
10326  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10327  int op0_is_copy = 0;
10328  int single_insn = 0;
10329  /* 1 means we can't store into OP0 directly,
10330     because it is a subreg narrower than a word,
10331     and we don't dare clobber the rest of the word.  */
10332  int bad_subreg = 0;
10333
10334  /* Stabilize any component ref that might need to be
10335     evaluated more than once below.  */
10336  if (!post
10337      || TREE_CODE (incremented) == BIT_FIELD_REF
10338      || (TREE_CODE (incremented) == COMPONENT_REF
10339	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10340	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10341    incremented = stabilize_reference (incremented);
10342  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
10343     ones into save exprs so that they don't accidentally get evaluated
10344     more than once by the code below.  */
10345  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10346      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10347    incremented = save_expr (incremented);
10348
10349  /* Compute the operands as RTX.
10350     Note whether OP0 is the actual lvalue or a copy of it:
10351     I believe it is a copy iff it is a register or subreg
10352     and insns were generated in computing it.   */
10353
10354  temp = get_last_insn ();
10355  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10356
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
10364
10365  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10366    {
10367      if (post)
10368	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10369      else
10370	bad_subreg = 1;
10371    }
10372  else if (GET_CODE (op0) == SUBREG
10373	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10374    {
10375      /* We cannot increment this SUBREG in place.  If we are
10376	 post-incrementing, get a copy of the old value.  Otherwise,
10377	 just mark that we cannot increment in place.  */
10378      if (post)
10379	op0 = copy_to_reg (op0);
10380      else
10381	bad_subreg = 1;
10382    }
10383
10384  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10385		 && temp != get_last_insn ());
10386  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10387		     EXPAND_MEMORY_USE_BAD);
10388
10389  /* Decide whether incrementing or decrementing.  */
10390  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10391      || TREE_CODE (exp) == PREDECREMENT_EXPR)
10392    this_optab = sub_optab;
10393
10394  /* Convert decrement by a constant into a negative increment.  */
10395  if (this_optab == sub_optab
10396      && GET_CODE (op1) == CONST_INT)
10397    {
10398      op1 = GEN_INT (- INTVAL (op1));
10399      this_optab = add_optab;
10400    }
10401
10402  /* For a preincrement, see if we can do this with a single instruction.  */
10403  if (!post)
10404    {
10405      icode = (int) this_optab->handlers[(int) mode].insn_code;
10406      if (icode != (int) CODE_FOR_nothing
10407	  /* Make sure that OP0 is valid for operands 0 and 1
10408	     of the insn we want to queue.  */
10409	  && (*insn_operand_predicate[icode][0]) (op0, mode)
10410	  && (*insn_operand_predicate[icode][1]) (op0, mode)
10411	  && (*insn_operand_predicate[icode][2]) (op1, mode))
10412	single_insn = 1;
10413    }
10414
10415  /* If OP0 is not the actual lvalue, but rather a copy in a register,
10416     then we cannot just increment OP0.  We must therefore contrive to
10417     increment the original value.  Then, for postincrement, we can return
10418     OP0 since it is a copy of the old value.  For preincrement, expand here
10419     unless we can do it with a single insn.
10420
10421     Likewise if storing directly into OP0 would clobber high bits
10422     we need to preserve (bad_subreg).  */
10423  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10424    {
10425      /* This is the easiest way to increment the value wherever it is.
10426	 Problems with multiple evaluation of INCREMENTED are prevented
10427	 because either (1) it is a component_ref or preincrement,
10428	 in which case it was stabilized above, or (2) it is an array_ref
10429	 with constant index in an array in a register, which is
10430	 safe to reevaluate.  */
10431      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10432			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
10433			    ? MINUS_EXPR : PLUS_EXPR),
10434			   TREE_TYPE (exp),
10435			   incremented,
10436			   TREE_OPERAND (exp, 1));
10437
10438      while (TREE_CODE (incremented) == NOP_EXPR
10439	     || TREE_CODE (incremented) == CONVERT_EXPR)
10440	{
10441	  newexp = convert (TREE_TYPE (incremented), newexp);
10442	  incremented = TREE_OPERAND (incremented, 0);
10443	}
10444
      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10446      return post ? op0 : temp;
10447    }
10448
10449  if (post)
10450    {
10451      /* We have a true reference to the value in OP0.
10452	 If there is an insn to add or subtract in this mode, queue it.
10453	 Queueing the increment insn avoids the register shuffling
10454	 that often results if we must increment now and first save
10455	 the old value for subsequent use.  */
10456
10457#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
10458      op0 = stabilize (op0);
10459#endif
10460
10461      icode = (int) this_optab->handlers[(int) mode].insn_code;
10462      if (icode != (int) CODE_FOR_nothing
10463	  /* Make sure that OP0 is valid for operands 0 and 1
10464	     of the insn we want to queue.  */
10465	  && (*insn_operand_predicate[icode][0]) (op0, mode)
10466	  && (*insn_operand_predicate[icode][1]) (op0, mode))
10467	{
10468	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10469	    op1 = force_reg (mode, op1);
10470
10471	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10472	}
10473      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10474	{
10475	  rtx addr = (general_operand (XEXP (op0, 0), mode)
10476		      ? force_reg (Pmode, XEXP (op0, 0))
10477		      : copy_to_reg (XEXP (op0, 0)));
10478	  rtx temp, result;
10479
10480	  op0 = change_address (op0, VOIDmode, addr);
10481	  temp = force_reg (GET_MODE (op0), op0);
10482	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10483	    op1 = force_reg (mode, op1);
10484
10485	  /* The increment queue is LIFO, thus we have to `queue'
10486	     the instructions in reverse order.  */
10487	  enqueue_insn (op0, gen_move_insn (op0, temp));
10488	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10489	  return result;
10490	}
10491    }
10492
10493  /* Preincrement, or we can't increment with one simple insn.  */
10494  if (post)
10495    /* Save a copy of the value before inc or dec, to return it later.  */
10496    temp = value = copy_to_reg (op0);
10497  else
10498    /* Arrange to return the incremented value.  */
10499    /* Copy the rtx because expand_binop will protect from the queue,
10500       and the results of that would be invalid for us to return
10501       if our caller does emit_queue before using our result.  */
10502    temp = copy_rtx (value = op0);
10503
10504  /* Increment however we can.  */
10505  op1 = expand_binop (mode, this_optab, value, op1,
10506  		      current_function_check_memory_usage ? NULL_RTX : op0,
10507		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10508  /* Make sure the value is stored into OP0.  */
10509  if (op1 != op0)
10510    emit_move_insn (op0, op1);
10511
10512  return temp;
10513}
10514
10515/* Expand all function calls contained within EXP, innermost ones first.
10516   But don't look within expressions that have sequence points.
10517   For each CALL_EXPR, record the rtx for its value
10518   in the CALL_EXPR_RTL field.  */
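/* For example, in `x = f (y) + z' the CALL_EXPR for f (y) is expanded
   here first and its value recorded in CALL_EXPR_RTL, so the later
   expansion of the sum simply reuses that rtx.  */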
10519
10520static void
10521preexpand_calls (exp)
10522     tree exp;
10523{
10524  register int nops, i;
10525  int type = TREE_CODE_CLASS (TREE_CODE (exp));
10526
10527  if (! do_preexpand_calls)
10528    return;
10529
10530  /* Only expressions and references can contain calls.  */
10531
10532  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10533    return;
10534
10535  switch (TREE_CODE (exp))
10536    {
10537    case CALL_EXPR:
10538      /* Do nothing if already expanded.  */
10539      if (CALL_EXPR_RTL (exp) != 0
10540	  /* Do nothing if the call returns a variable-sized object.  */
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10542	  /* Do nothing to built-in functions.  */
10543	  || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10544	      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10545		  == FUNCTION_DECL)
10546	      && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10547	return;
10548
10549      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10550      return;
10551
10552    case COMPOUND_EXPR:
10553    case COND_EXPR:
10554    case TRUTH_ANDIF_EXPR:
10555    case TRUTH_ORIF_EXPR:
10556      /* If we find one of these, then we can be sure
10557	 the adjust will be done for it (since it makes jumps).
10558	 Do it now, so that if this is inside an argument
10559	 of a function, we don't get the stack adjustment
10560	 after some other args have already been pushed.  */
10561      do_pending_stack_adjust ();
10562      return;
10563
10564    case BLOCK:
10565    case RTL_EXPR:
10566    case WITH_CLEANUP_EXPR:
10567    case CLEANUP_POINT_EXPR:
10568    case TRY_CATCH_EXPR:
10569      return;
10570
10571    case SAVE_EXPR:
10572      if (SAVE_EXPR_RTL (exp) != 0)
10573	return;
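      /* Otherwise fall through to scan this node's operands.  */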
10574
10575    default:
10576      break;
10577    }
10578
10579  nops = tree_code_length[(int) TREE_CODE (exp)];
10580  for (i = 0; i < nops; i++)
10581    if (TREE_OPERAND (exp, i) != 0)
10582      {
10583	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10584	if (type == 'e' || type == '<' || type == '1' || type == '2'
10585	    || type == 'r')
10586	  preexpand_calls (TREE_OPERAND (exp, i));
10587      }
10588}
10589
10590/* At the start of a function, record that we have no previously-pushed
10591   arguments waiting to be popped.  */
10592
10593void
10594init_pending_stack_adjust ()
10595{
10596  pending_stack_adjust = 0;
10597}
10598
10599/* When exiting from function, if safe, clear out any pending stack adjust
10600   so the adjustment won't get done.
10601
10602   Note, if the current function calls alloca, then it must have a
10603   frame pointer regardless of the value of flag_omit_frame_pointer.  */
10604
10605void
10606clear_pending_stack_adjust ()
10607{
10608#ifdef EXIT_IGNORE_STACK
10609  if (optimize > 0
10610      && (! flag_omit_frame_pointer || current_function_calls_alloca)
10611      && EXIT_IGNORE_STACK
10612      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10613      && ! flag_inline_functions)
10614    pending_stack_adjust = 0;
10615#endif
10616}
10617
10618/* Pop any previously-pushed arguments that have not been popped yet.  */
10619
10620void
10621do_pending_stack_adjust ()
10622{
10623  if (inhibit_defer_pop == 0)
10624    {
10625      if (pending_stack_adjust != 0)
10626	adjust_stack (GEN_INT (pending_stack_adjust));
10627      pending_stack_adjust = 0;
10628    }
10629}
10630
10631/* Expand conditional expressions.  */
10632
10633/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10634   LABEL is an rtx of code CODE_LABEL, in this function and all the
10635   functions here.  */
10636
10637void
10638jumpifnot (exp, label)
10639     tree exp;
10640     rtx label;
10641{
10642  do_jump (exp, label, NULL_RTX);
10643}
10644
10645/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
10646
10647void
10648jumpif (exp, label)
10649     tree exp;
10650     rtx label;
10651{
10652  do_jump (exp, NULL_RTX, label);
10653}
10654
10655/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10656   the result is zero, or IF_TRUE_LABEL if the result is one.
10657   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10658   meaning fall through in that case.
10659
10660   do_jump always does any pending stack adjust except when it does not
10661   actually perform a jump.  An example where there is no jump
10662   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10663
10664   This function is responsible for optimizing cases such as
10665   &&, || and comparison operators in EXP.  */
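/* For example, `a && b' needs no materialized boolean value: the first
   do_jump branches to the false label when A is zero, and B is tested
   only on the fall-through path (see the TRUTH_ANDIF_EXPR case).  */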
10666
10667void
10668do_jump (exp, if_false_label, if_true_label)
10669     tree exp;
10670     rtx if_false_label, if_true_label;
10671{
10672  register enum tree_code code = TREE_CODE (exp);
10673  /* Some cases need to create a label to jump to
10674     in order to properly fall through.
10675     These cases set DROP_THROUGH_LABEL nonzero.  */
10676  rtx drop_through_label = 0;
10677  rtx temp;
10678  rtx comparison = 0;
10679  int i;
10680  tree type;
10681  enum machine_mode mode;
10682
10683#ifdef MAX_INTEGER_COMPUTATION_MODE
10684  check_max_integer_computation_mode (exp);
10685#endif
10686
10687  emit_queue ();
10688
10689  switch (code)
10690    {
10691    case ERROR_MARK:
10692      break;
10693
10694    case INTEGER_CST:
10695      temp = integer_zerop (exp) ? if_false_label : if_true_label;
10696      if (temp)
10697	emit_jump (temp);
10698      break;
10699
10700#if 0
10701      /* This is not true with #pragma weak  */
10702    case ADDR_EXPR:
10703      /* The address of something can never be zero.  */
10704      if (if_true_label)
10705	emit_jump (if_true_label);
10706      break;
10707#endif
10708
10709    case NOP_EXPR:
10710      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10711	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10712	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10713	goto normal;
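      /* Fall through.  */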
10714    case CONVERT_EXPR:
10715      /* If we are narrowing the operand, we have to do the compare in the
10716	 narrower mode.  */
10717      if ((TYPE_PRECISION (TREE_TYPE (exp))
10718	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10719	goto normal;
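      /* Fall through.  */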
10720    case NON_LVALUE_EXPR:
10721    case REFERENCE_EXPR:
10722    case ABS_EXPR:
10723    case NEGATE_EXPR:
10724    case LROTATE_EXPR:
10725    case RROTATE_EXPR:
10726      /* These cannot change zero->non-zero or vice versa.  */
10727      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10728      break;
10729
10730#if 0
10731      /* This is never less insns than evaluating the PLUS_EXPR followed by
10732	 a test and can be longer if the test is eliminated.  */
10733    case PLUS_EXPR:
10734      /* Reduce to minus.  */
10735      exp = build (MINUS_EXPR, TREE_TYPE (exp),
10736		   TREE_OPERAND (exp, 0),
10737		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10738				 TREE_OPERAND (exp, 1))));
10739      /* Process as MINUS.  */
10740#endif
10741
10742    case MINUS_EXPR:
10743      /* Non-zero iff operands of minus differ.  */
10744      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10745				   TREE_OPERAND (exp, 0),
10746				   TREE_OPERAND (exp, 1)),
10747			    NE, NE);
10748      break;
10749
10750    case BIT_AND_EXPR:
10751      /* If we are AND'ing with a small constant, do this comparison in the
10752	 smallest type that fits.  If the machine doesn't have comparisons
10753	 that small, it will be converted back to the wider comparison.
10754	 This helps if we are testing the sign bit of a narrower object.
10755	 combine can't do this for us because it can't know whether a
10756	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
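      /* For example, testing `x & 0x80' needs only the low byte:
	 floor_log2 (0x80) == 7, so mode_for_size selects an 8-bit
	 integer mode and the comparison is done in that mode.  */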
10757
10758      if (! SLOW_BYTE_ACCESS
10759	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10760	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10761	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10762	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10763	  && (type = type_for_mode (mode, 1)) != 0
10764	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10765	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10766	      != CODE_FOR_nothing))
10767	{
10768	  do_jump (convert (type, exp), if_false_label, if_true_label);
10769	  break;
10770	}
10771      goto normal;
10772
10773    case TRUTH_NOT_EXPR:
10774      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10775      break;
10776
10777    case TRUTH_ANDIF_EXPR:
10778      if (if_false_label == 0)
10779	if_false_label = drop_through_label = gen_label_rtx ();
10780      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10781      start_cleanup_deferral ();
10782      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10783      end_cleanup_deferral ();
10784      break;
10785
10786    case TRUTH_ORIF_EXPR:
10787      if (if_true_label == 0)
10788	if_true_label = drop_through_label = gen_label_rtx ();
10789      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10790      start_cleanup_deferral ();
10791      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10792      end_cleanup_deferral ();
10793      break;
10794
10795    case COMPOUND_EXPR:
10796      push_temp_slots ();
10797      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10798      preserve_temp_slots (NULL_RTX);
10799      free_temp_slots ();
10800      pop_temp_slots ();
10801      emit_queue ();
10802      do_pending_stack_adjust ();
10803      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10804      break;
10805
10806    case COMPONENT_REF:
10807    case BIT_FIELD_REF:
10808    case ARRAY_REF:
10809      {
10810	int bitsize, bitpos, unsignedp;
10811	enum machine_mode mode;
10812	tree type;
10813	tree offset;
10814	int volatilep = 0;
10815	int alignment;
10816
10817	/* Get description of this reference.  We don't actually care
10818	   about the underlying object here.  */
10819	get_inner_reference (exp, &bitsize, &bitpos, &offset,
10820			     &mode, &unsignedp, &volatilep,
10821			     &alignment);
10822
10823	type = type_for_size (bitsize, unsignedp);
10824	if (! SLOW_BYTE_ACCESS
10825	    && type != 0 && bitsize >= 0
10826	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10827	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10828		!= CODE_FOR_nothing))
10829	  {
10830	    do_jump (convert (type, exp), if_false_label, if_true_label);
10831	    break;
10832	  }
10833	goto normal;
10834      }
10835
10836    case COND_EXPR:
10837      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
10838      if (integer_onep (TREE_OPERAND (exp, 1))
10839	  && integer_zerop (TREE_OPERAND (exp, 2)))
10840	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10841
10842      else if (integer_zerop (TREE_OPERAND (exp, 1))
10843	       && integer_onep (TREE_OPERAND (exp, 2)))
10844	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10845
10846      else
10847	{
10848	  register rtx label1 = gen_label_rtx ();
10849	  drop_through_label = gen_label_rtx ();
10850
10851	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10852
10853	  start_cleanup_deferral ();
10854	  /* Now the THEN-expression.  */
10855	  do_jump (TREE_OPERAND (exp, 1),
10856		   if_false_label ? if_false_label : drop_through_label,
10857		   if_true_label ? if_true_label : drop_through_label);
10858	  /* In case the do_jump just above never jumps.  */
10859	  do_pending_stack_adjust ();
10860	  emit_label (label1);
10861
10862	  /* Now the ELSE-expression.  */
10863	  do_jump (TREE_OPERAND (exp, 2),
10864		   if_false_label ? if_false_label : drop_through_label,
10865		   if_true_label ? if_true_label : drop_through_label);
10866	  end_cleanup_deferral ();
10867	}
10868      break;
10869
10870    case EQ_EXPR:
10871      {
10872	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10873
10874	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10875	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10876	  {
10877	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10878	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10879	    do_jump
10880	      (fold
10881	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10882		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10883				    fold (build1 (REALPART_EXPR,
10884						  TREE_TYPE (inner_type),
10885						  exp0)),
10886				    fold (build1 (REALPART_EXPR,
10887						  TREE_TYPE (inner_type),
10888						  exp1)))),
10889		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10890				    fold (build1 (IMAGPART_EXPR,
10891						  TREE_TYPE (inner_type),
10892						  exp0)),
10893				    fold (build1 (IMAGPART_EXPR,
10894						  TREE_TYPE (inner_type),
10895						  exp1)))))),
10896	       if_false_label, if_true_label);
10897	  }
10898
10899	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10900	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10901
10902	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10903		 && !can_compare_p (TYPE_MODE (inner_type)))
10904	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10905	else
10906	  comparison = compare (exp, EQ, EQ);
10907	break;
10908      }
10909
10910    case NE_EXPR:
10911      {
10912	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10913
10914	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10915	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10916	  {
10917	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10918	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10919	    do_jump
10920	      (fold
10921	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10922		       fold (build (NE_EXPR, TREE_TYPE (exp),
10923				    fold (build1 (REALPART_EXPR,
10924						  TREE_TYPE (inner_type),
10925						  exp0)),
10926				    fold (build1 (REALPART_EXPR,
10927						  TREE_TYPE (inner_type),
10928						  exp1)))),
10929		       fold (build (NE_EXPR, TREE_TYPE (exp),
10930				    fold (build1 (IMAGPART_EXPR,
10931						  TREE_TYPE (inner_type),
10932						  exp0)),
10933				    fold (build1 (IMAGPART_EXPR,
10934						  TREE_TYPE (inner_type),
10935						  exp1)))))),
10936	       if_false_label, if_true_label);
10937	  }
10938
10939	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10940	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10941
10942	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10943		 && !can_compare_p (TYPE_MODE (inner_type)))
10944	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10945	else
10946	  comparison = compare (exp, NE, NE);
10947	break;
10948      }
10949
10950    case LT_EXPR:
10951      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10952	   == MODE_INT)
10953	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10954	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10955      else
10956	comparison = compare (exp, LT, LTU);
10957      break;
10958
10959    case LE_EXPR:
10960      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10961	   == MODE_INT)
10962	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10963	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10964      else
10965	comparison = compare (exp, LE, LEU);
10966      break;
10967
10968    case GT_EXPR:
10969      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10970	   == MODE_INT)
10971	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10972	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10973      else
10974	comparison = compare (exp, GT, GTU);
10975      break;
10976
10977    case GE_EXPR:
10978      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10979	   == MODE_INT)
10980	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10981	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10982      else
10983	comparison = compare (exp, GE, GEU);
10984      break;
10985
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is no longer needed and causes poor code, since it makes
	 comparisons and tests from non-SI objects use different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      else
	abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
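
/* Illustrative sketch, not part of the original source: a caller
   expanding "if (cond) stmt;" might use do_jump roughly as

	rtx else_label = gen_label_rtx ();
	do_jump (cond, else_label, NULL_RTX);
	...expand STMT here; this point is reached when COND is true...
	emit_label (else_label);

   Passing NULL_RTX for either label requests a fall-through for that
   outcome, which is what DROP_THROUGH_LABEL above arranges.  */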

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider the lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
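
/* Illustrative sketch, not part of the original source: for a signed
   double-word "x > y" the loop above emits, in effect,

	if ((signed) hi (x) > (signed) hi (y))      goto if_true_label;
	if (hi (x) != hi (y))                       goto if_false_label;
	if ((unsigned) lo (x) > (unsigned) lo (y))  goto if_true_label;
	goto if_false_label;

   where hi and lo stand for the high- and low-order words.  Only the
   high-order word is compared with the signed GT; every lower word
   uses the unsigned GTU, since lower words carry no sign.  */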

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider the lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      /* If this word pair is known unequal, the whole comparison is
	 false; if it is known equal, it decides nothing.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
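
/* Illustrative sketch, not part of the original source: for a
   double-word "x == y" the loop above amounts to

	if (word0 (x) != word0 (y))  goto if_false_label;
	if (word1 (x) != word1 (y))  goto if_false_label;
	goto if_true_label;

   where word0 and word1 stand for the two words of each operand;
   any unequal word pair decides the comparison immediately.  */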

/* Jump according to whether OP0 is 0: to IF_TRUE_LABEL if it is,
   to IF_FALSE_LABEL if it is not.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
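
  /* Illustrative sketch, not part of the original source: for a
     double-word OP0 the "or" strategy reduces the test to

	part = word0 (op0) | word1 (op0);
	if (part == 0)  goto if_true_label;
	goto if_false_label;

     one IOR per extra word plus a single word-sized comparison.  */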

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
				   NULL_RTX, 0);

      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp == const0_rtx)
	emit_jump (if_false_label);
      else
	do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* A word known to be nonzero decides the test; a word known to
	 be zero decides nothing.  */
      if (comp == const0_rtx)
	emit_jump (if_false_label);
      else if (comp != const_true_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
			  (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx first = get_last_insn (), insn, branch;
      int br_count;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label, and define the true label.  */
      /* ??? Note that we wouldn't have to do any of this nonsense if
	 we passed both labels into a combined compare-and-branch.
	 Ah well, jump threading does a good job of repairing the damage.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
			  (if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
	first = get_insns ();
      else if (INSN_DELETED_P (first))
	abort ();
      else
	first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
	 for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    branch = insn;
	    br_count += 1;
	  }

      /* If we've got one branch at the end of the sequence,
	 we can try to reverse it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
	{
	  rtx insn_label;
	  insn_label = XEXP (condjump_label (branch), 0);
	  JUMP_LABEL (branch) = insn_label;

	  if (insn_label != if_false_label)
	    abort ();

	  if (invert_jump (branch, if_false_label))
	    return;
	}

      /* Multiple branches, or reversion failed.  Convert to branches
	 around an unconditional jump.  */

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    rtx insn_label;
	    insn_label = XEXP (condjump_label (insn), 0);
	    JUMP_LABEL (insn) = insn_label;

	    if (insn_label == if_false_label)
	      redirect_jump (insn, if_true_label);
	  }
      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}
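
/* Illustrative sketch, not part of the original source: when the lone
   branch to IF_FALSE_LABEL cannot be inverted, the fallback above
   rewrites, in goto form,

	if (COND)  goto if_false_label;
   into
	if (COND)  goto Ltrue;
	goto if_false_label;
   Ltrue:

   which preserves the semantics at the cost of one extra jump.  */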

/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return op0;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */
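
  /* Illustrative example, not part of the original source: given
     "(const_int 3) LT (reg X)" the swap below canonicalizes this to
     "(reg X) GT (const_int 3)", so later code may assume that a lone
     constant always sits in OP1.  */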

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
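
  /* Illustrative examples, not part of the original source, of the
     conversions performed below:

	x <  1    becomes   x <= 0
	x >= 1    becomes   x >  0
	x <= -1   becomes   x <  0    (signed only)
	x >  -1   becomes   x >= 0    (signed only)

     so that the single-bit and sign-bit shortcuts that follow only
     have to recognize comparisons against zero.  */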

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
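
  /* Illustrative examples, not part of the original source:

	(x & 8) != 0   becomes   (x >> 3) & 1
	(x & 8) == 0   becomes   ((x >> 3) & 1) ^ 1

     and when the tested bit is the most significant bit of the type
     the trailing "& 1" can be omitted, as arranged below.  */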

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
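
/* Illustrative sketch, not part of the original source: for
   INVERT == 0 the set/compare/jump/set fallback above produces

	target = 1;
	if (op0 CODE op1)  goto label;
	target = 0;
   label:

   i.e. the store-flag result is synthesized with a conditional
   branch when no scc-style instruction is available.  */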

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
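
  /* Illustrative example, not part of the original source: for
     "switch (i)" with cases 5 through 8, INDEX arrives here as i - 5
     and RANGE is 3, so the single unsigned test

	if ((unsigned) (i - 5) > 3)  goto default_label;

     also catches every i below 5, because such values wrap around
     to huge unsigned numbers.  */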

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
