expr.c revision 146895
1275970Scy/* Convert tree expression to rtl instructions, for GNU compiler.
2275970Scy   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3275970Scy   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4275970Scy
5275970ScyThis file is part of GCC.
6275970Scy
7275970ScyGCC is free software; you can redistribute it and/or modify it under
8275970Scythe terms of the GNU General Public License as published by the Free
9275970ScySoftware Foundation; either version 2, or (at your option) any later
10275970Scyversion.
11275970Scy
12275970ScyGCC is distributed in the hope that it will be useful, but WITHOUT ANY
13330567SgordonWARRANTY; without even the implied warranty of MERCHANTABILITY or
14275970ScyFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15330567Sgordonfor more details.
16275970Scy
17330567SgordonYou should have received a copy of the GNU General Public License
18275970Scyalong with GCC; see the file COPYING.  If not, write to the Free
19275970ScySoftware Foundation, 59 Temple Place - Suite 330, Boston, MA
20275970Scy02111-1307, USA.  */
21275970Scy
22275970Scy#include "config.h"
23275970Scy#include "system.h"
24275970Scy#include "coretypes.h"
25275970Scy#include "tm.h"
26275970Scy#include "machmode.h"
27275970Scy#include "real.h"
28275970Scy#include "rtl.h"
29275970Scy#include "tree.h"
30275970Scy#include "flags.h"
31275970Scy#include "regs.h"
32275970Scy#include "hard-reg-set.h"
33275970Scy#include "except.h"
34275970Scy#include "function.h"
35275970Scy#include "insn-config.h"
36275970Scy#include "insn-attr.h"
37275970Scy/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
38275970Scy#include "expr.h"
39275970Scy#include "optabs.h"
40275970Scy#include "libfuncs.h"
41275970Scy#include "recog.h"
42275970Scy#include "reload.h"
43275970Scy#include "output.h"
44275970Scy#include "typeclass.h"
45275970Scy#include "toplev.h"
46275970Scy#include "ggc.h"
47275970Scy#include "langhooks.h"
48275970Scy#include "intl.h"
49275970Scy#include "tm_p.h"
50275970Scy#include "target.h"
51275970Scy
52275970Scy/* Decide whether a function's arguments should be processed
53275970Scy   from first to last or from last to first.
54275970Scy
55275970Scy   They should if the stack and args grow in opposite directions, but
56275970Scy   only if we have push insns.  */
57275970Scy
58275970Scy#ifdef PUSH_ROUNDING
59275970Scy
60275970Scy#ifndef PUSH_ARGS_REVERSED
61275970Scy#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62285612Sdelphij#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
63275970Scy#endif
64275970Scy#endif
65275970Scy
66275970Scy#endif
67275970Scy
68275970Scy#ifndef STACK_PUSH_CODE
69275970Scy#ifdef STACK_GROWS_DOWNWARD
70275970Scy#define STACK_PUSH_CODE PRE_DEC
71275970Scy#else
72275970Scy#define STACK_PUSH_CODE PRE_INC
73275970Scy#endif
74275970Scy#endif
75275970Scy
76275970Scy/* Assume that case vectors are not pc-relative.  */
77275970Scy#ifndef CASE_VECTOR_PC_RELATIVE
78275970Scy#define CASE_VECTOR_PC_RELATIVE 0
79275970Scy#endif
80275970Scy
81285612Sdelphij/* Convert defined/undefined to boolean.  */
82275970Scy#ifdef TARGET_MEM_FUNCTIONS
83275970Scy#undef TARGET_MEM_FUNCTIONS
84275970Scy#define TARGET_MEM_FUNCTIONS 1
85275970Scy#else
86275970Scy#define TARGET_MEM_FUNCTIONS 0
87275970Scy#endif
88275970Scy
89275970Scy
90275970Scy/* If this is nonzero, we do not bother generating VOLATILE
91275970Scy   around volatile memory references, and we are willing to
92275970Scy   output indirect addresses.  If cse is to follow, we reject
93275970Scy   indirect addresses so a useful potential cse is generated;
94275970Scy   if it is used only once, instruction combination will produce
95275970Scy   the same indirect address eventually.  */
96285612Sdelphijint cse_not_expected;
97275970Scy
98285612Sdelphij/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
99275970Scytree placeholder_list = 0;
100275970Scy
101275970Scy/* This structure is used by move_by_pieces to describe the move to
102275970Scy   be performed.  */
103275970Scystruct move_by_pieces
104275970Scy{
105275970Scy  rtx to;
106285612Sdelphij  rtx to_addr;
107275970Scy  int autinc_to;
108275970Scy  int explicit_inc_to;
109275970Scy  rtx from;
110285612Sdelphij  rtx from_addr;
111275970Scy  int autinc_from;
112275970Scy  int explicit_inc_from;
113275970Scy  unsigned HOST_WIDE_INT len;
114275970Scy  HOST_WIDE_INT offset;
115275970Scy  int reverse;
116275970Scy};
117275970Scy
118275970Scy/* This structure is used by store_by_pieces to describe the clear to
119275970Scy   be performed.  */
120275970Scy
121275970Scystruct store_by_pieces
122275970Scy{
123275970Scy  rtx to;
124275970Scy  rtx to_addr;
125275970Scy  int autinc_to;
126275970Scy  int explicit_inc_to;
127275970Scy  unsigned HOST_WIDE_INT len;
128275970Scy  HOST_WIDE_INT offset;
129275970Scy  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
130275970Scy  void *constfundata;
131275970Scy  int reverse;
132275970Scy};
133275970Scy
134275970Scystatic rtx enqueue_insn (rtx, rtx);
135275970Scystatic unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
136275970Scy						     unsigned int);
137275970Scystatic void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
138275970Scy			      struct move_by_pieces *);
139275970Scystatic bool block_move_libcall_safe_for_call_parm (void);
140275970Scystatic bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
141275970Scystatic rtx emit_block_move_via_libcall (rtx, rtx, rtx);
142275970Scystatic tree emit_block_move_libcall_fn (int);
143275970Scystatic void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
144275970Scystatic rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
145275970Scystatic void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
146275970Scystatic void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
147275970Scystatic void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
148275970Scy			       struct store_by_pieces *);
149275970Scystatic bool clear_storage_via_clrstr (rtx, rtx, unsigned);
150275970Scystatic rtx clear_storage_via_libcall (rtx, rtx);
151275970Scystatic tree clear_storage_libcall_fn (int);
152275970Scystatic rtx compress_float_constant (rtx, rtx);
153275970Scystatic rtx get_subtarget (rtx);
154275970Scystatic int is_zeros_p (tree);
155275970Scystatic void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
156275970Scy				     HOST_WIDE_INT, enum machine_mode,
157275970Scy				     tree, tree, int, int);
158275970Scystatic void store_constructor (tree, rtx, int, HOST_WIDE_INT);
159275970Scystatic rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
160275970Scy			tree, enum machine_mode, int, tree, int);
161275970Scystatic rtx var_rtx (tree);
162275970Scy
163275970Scystatic unsigned HOST_WIDE_INT highest_pow2_factor (tree);
164275970Scystatic unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
165275970Scy
166275970Scystatic int is_aligning_offset (tree, tree);
167275970Scystatic rtx expand_increment (tree, int, int);
168275970Scystatic void expand_operands (tree, tree, rtx, rtx*, rtx*,
169275970Scy			     enum expand_modifier);
170275970Scystatic rtx do_store_flag (tree, rtx, enum machine_mode, int);
171275970Scy#ifdef PUSH_ROUNDING
172275970Scystatic void emit_single_push_insn (enum machine_mode, rtx, tree);
173275970Scy#endif
174275970Scystatic void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
175275970Scystatic rtx const_vector_from_tree (tree);
176275970Scy
177275970Scy/* Record for each mode whether we can move a register directly to or
178285612Sdelphij   from an object of that mode in memory.  If we can't, we won't try
179275970Scy   to use that mode directly when accessing a field of that mode.  */
180275970Scy
181275970Scystatic char direct_load[NUM_MACHINE_MODES];
182275970Scystatic char direct_store[NUM_MACHINE_MODES];
183275970Scy
184275970Scy/* Record for each mode whether we can float-extend from memory.  */
185275970Scy
186275970Scystatic bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
187275970Scy
188275970Scy/* This macro is used to determine whether move_by_pieces should be called
189275970Scy   to perform a structure copy.  */
190275970Scy#ifndef MOVE_BY_PIECES_P
191275970Scy#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192275970Scy  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
193275970Scy#endif
194275970Scy
195275970Scy/* This macro is used to determine whether clear_by_pieces should be
196275970Scy   called to clear storage.  */
197275970Scy#ifndef CLEAR_BY_PIECES_P
198275970Scy#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
199285612Sdelphij  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
200285612Sdelphij#endif
201285612Sdelphij
202285612Sdelphij/* This macro is used to determine whether store_by_pieces should be
203285612Sdelphij   called to "memset" storage with byte values other than zero, or
204285612Sdelphij   to "memcpy" storage when the source is a constant string.  */
205285612Sdelphij#ifndef STORE_BY_PIECES_P
206285612Sdelphij#define STORE_BY_PIECES_P(SIZE, ALIGN)	MOVE_BY_PIECES_P (SIZE, ALIGN)
207285612Sdelphij#endif
208285612Sdelphij
209285612Sdelphij/* This array records the insn_code of insns to perform block moves.  */
210285612Sdelphijenum insn_code movstr_optab[NUM_MACHINE_MODES];
211285612Sdelphij
212285612Sdelphij/* This array records the insn_code of insns to perform block clears.  */
213275970Scyenum insn_code clrstr_optab[NUM_MACHINE_MODES];
214275970Scy
215275970Scy/* These arrays record the insn_code of two different kinds of insns
216275970Scy   to perform block compares.  */
217275970Scyenum insn_code cmpstr_optab[NUM_MACHINE_MODES];
218275970Scyenum insn_code cmpmem_optab[NUM_MACHINE_MODES];
219275970Scy
220275970Scy/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
221275970Scystruct file_stack *expr_wfl_stack;
222275970Scy
223275970Scy/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
224275970Scy
225275970Scy#ifndef SLOW_UNALIGNED_ACCESS
226275970Scy#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
227275970Scy#endif
228275970Scy
229275970Scy/* This is run once per compilation to set up which modes can be used
230275970Scy   directly in memory and to initialize the block move optab.  */
231275970Scy
232275970Scyvoid
233275970Scyinit_expr_once (void)
234275970Scy{
235275970Scy  rtx insn, pat;
236275970Scy  enum machine_mode mode;
237285612Sdelphij  int num_clobbers;
238275970Scy  rtx mem, mem1;
239275970Scy  rtx reg;
240275970Scy
241275970Scy  /* Try indexing by frame ptr and try by stack ptr.
242275970Scy     It is known that on the Convex the stack ptr isn't a valid index.
243275970Scy     With luck, one or the other is valid on any machine.  */
244275970Scy  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
245275970Scy  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
246275970Scy
247275970Scy  /* A scratch register we can modify in-place below to avoid
248275970Scy     useless RTL allocations.  */
249275970Scy  reg = gen_rtx_REG (VOIDmode, -1);
250275970Scy
251275970Scy  insn = rtx_alloc (INSN);
252275970Scy  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
253275970Scy  PATTERN (insn) = pat;
254275970Scy
255275970Scy  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
256275970Scy       mode = (enum machine_mode) ((int) mode + 1))
257275970Scy    {
258275970Scy      int regno;
259275970Scy
260275970Scy      direct_load[(int) mode] = direct_store[(int) mode] = 0;
261275970Scy      PUT_MODE (mem, mode);
262275970Scy      PUT_MODE (mem1, mode);
263275970Scy      PUT_MODE (reg, mode);
264275970Scy
265275970Scy      /* See if there is some register that can be used in this mode and
266275970Scy	 directly loaded or stored from memory.  */
267275970Scy
268275970Scy      if (mode != VOIDmode && mode != BLKmode)
269275970Scy	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
270275970Scy	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
271275970Scy	     regno++)
272275970Scy	  {
273275970Scy	    if (! HARD_REGNO_MODE_OK (regno, mode))
274275970Scy	      continue;
275275970Scy
276275970Scy	    REGNO (reg) = regno;
277275970Scy
278275970Scy	    SET_SRC (pat) = mem;
279275970Scy	    SET_DEST (pat) = reg;
280275970Scy	    if (recog (pat, insn, &num_clobbers) >= 0)
281275970Scy	      direct_load[(int) mode] = 1;
282275970Scy
283275970Scy	    SET_SRC (pat) = mem1;
284275970Scy	    SET_DEST (pat) = reg;
285275970Scy	    if (recog (pat, insn, &num_clobbers) >= 0)
286275970Scy	      direct_load[(int) mode] = 1;
287275970Scy
288275970Scy	    SET_SRC (pat) = reg;
289275970Scy	    SET_DEST (pat) = mem;
290275970Scy	    if (recog (pat, insn, &num_clobbers) >= 0)
291275970Scy	      direct_store[(int) mode] = 1;
292275970Scy
293275970Scy	    SET_SRC (pat) = reg;
294275970Scy	    SET_DEST (pat) = mem1;
295275970Scy	    if (recog (pat, insn, &num_clobbers) >= 0)
296275970Scy	      direct_store[(int) mode] = 1;
297275970Scy	  }
298275970Scy    }
299275970Scy
300275970Scy  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
301275970Scy
302275970Scy  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
303275970Scy       mode = GET_MODE_WIDER_MODE (mode))
304275970Scy    {
305275970Scy      enum machine_mode srcmode;
306275970Scy      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
307275970Scy	   srcmode = GET_MODE_WIDER_MODE (srcmode))
308275970Scy	{
309275970Scy	  enum insn_code ic;
310275970Scy
311275970Scy	  ic = can_extend_p (mode, srcmode, 0);
312275970Scy	  if (ic == CODE_FOR_nothing)
313275970Scy	    continue;
314275970Scy
315275970Scy	  PUT_MODE (mem, srcmode);
316275970Scy
317275970Scy	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
318275970Scy	    float_extend_from_mem[mode][srcmode] = true;
319275970Scy	}
320275970Scy    }
321275970Scy}
322275970Scy
323275970Scy/* This is run at the start of compiling a function.  */
324275970Scy
325275970Scyvoid
326275970Scyinit_expr (void)
327275970Scy{
328275970Scy  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
329275970Scy}
330275970Scy
331275970Scy/* Small sanity check that the queue is empty at the end of a function.  */
332275970Scy
333275970Scyvoid
334275970Scyfinish_expr_for_function (void)
335275970Scy{
336275970Scy  if (pending_chain)
337275970Scy    abort ();
338275970Scy}
339275970Scy
340275970Scy/* Manage the queue of increment instructions to be output
341275970Scy   for POSTINCREMENT_EXPR expressions, etc.  */
342275970Scy
343275970Scy/* Queue up to increment (or change) VAR later.  BODY says how:
344275970Scy   BODY should be the same thing you would pass to emit_insn
345275970Scy   to increment right away.  It will go to emit_insn later on.
346275970Scy
347275970Scy   The value is a QUEUED expression to be used in place of VAR
348275970Scy   where you want to guarantee the pre-incrementation value of VAR.  */
349275970Scy
350275970Scystatic rtx
351275970Scyenqueue_insn (rtx var, rtx body)
352275970Scy{
353275970Scy  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
354275970Scy				  body, pending_chain);
355275970Scy  return pending_chain;
356275970Scy}
357275970Scy
358275970Scy/* Use protect_from_queue to convert a QUEUED expression
359275970Scy   into something that you can put immediately into an instruction.
360275970Scy   If the queued incrementation has not happened yet,
361275970Scy   protect_from_queue returns the variable itself.
362275970Scy   If the incrementation has happened, protect_from_queue returns a temp
363275970Scy   that contains a copy of the old value of the variable.
364275970Scy
365275970Scy   Any time an rtx which might possibly be a QUEUED is to be put
366275970Scy   into an instruction, it must be passed through protect_from_queue first.
367275970Scy   QUEUED expressions are not meaningful in instructions.
368275970Scy
369275970Scy   Do not pass a value through protect_from_queue and then hold
370275970Scy   on to it for a while before putting it in an instruction!
371275970Scy   If the queue is flushed in between, incorrect code will result.  */
372275970Scy
373275970Scyrtx
374275970Scyprotect_from_queue (rtx x, int modify)
375275970Scy{
376275970Scy  RTX_CODE code = GET_CODE (x);
377275970Scy
378275970Scy#if 0  /* A QUEUED can hang around after the queue is forced out.  */
379275970Scy  /* Shortcut for most common case.  */
380275970Scy  if (pending_chain == 0)
381275970Scy    return x;
382275970Scy#endif
383275970Scy
384275970Scy  if (code != QUEUED)
385275970Scy    {
386275970Scy      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
387275970Scy	 use of autoincrement.  Make a copy of the contents of the memory
388275970Scy	 location rather than a copy of the address, but not if the value is
389275970Scy	 of mode BLKmode.  Don't modify X in place since it might be
390275970Scy	 shared.  */
391275970Scy      if (code == MEM && GET_MODE (x) != BLKmode
392275970Scy	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
393275970Scy	{
394275970Scy	  rtx y = XEXP (x, 0);
395275970Scy	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
396275970Scy
397275970Scy	  if (QUEUED_INSN (y))
398275970Scy	    {
399275970Scy	      rtx temp = gen_reg_rtx (GET_MODE (x));
400275970Scy
401275970Scy	      emit_insn_before (gen_move_insn (temp, new),
402275970Scy				QUEUED_INSN (y));
403275970Scy	      return temp;
404275970Scy	    }
405275970Scy
406275970Scy	  /* Copy the address into a pseudo, so that the returned value
407275970Scy	     remains correct across calls to emit_queue.  */
408275970Scy	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
409275970Scy	}
410275970Scy
411275970Scy      /* Otherwise, recursively protect the subexpressions of all
412275970Scy	 the kinds of rtx's that can contain a QUEUED.  */
413275970Scy      if (code == MEM)
414275970Scy	{
415275970Scy	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
416275970Scy	  if (tem != XEXP (x, 0))
417275970Scy	    {
418275970Scy	      x = copy_rtx (x);
419275970Scy	      XEXP (x, 0) = tem;
420275970Scy	    }
421275970Scy	}
422275970Scy      else if (code == PLUS || code == MULT)
423275970Scy	{
424275970Scy	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
425275970Scy	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
426275970Scy	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
427275970Scy	    {
428275970Scy	      x = copy_rtx (x);
429275970Scy	      XEXP (x, 0) = new0;
430275970Scy	      XEXP (x, 1) = new1;
431275970Scy	    }
432275970Scy	}
433275970Scy      return x;
434275970Scy    }
435275970Scy  /* If the increment has not happened, use the variable itself.  Copy it
436275970Scy     into a new pseudo so that the value remains correct across calls to
437275970Scy     emit_queue.  */
438275970Scy  if (QUEUED_INSN (x) == 0)
439275970Scy    return copy_to_reg (QUEUED_VAR (x));
440275970Scy  /* If the increment has happened and a pre-increment copy exists,
441275970Scy     use that copy.  */
442275970Scy  if (QUEUED_COPY (x) != 0)
443275970Scy    return QUEUED_COPY (x);
444275970Scy  /* The increment has happened but we haven't set up a pre-increment copy.
445275970Scy     Set one up now, and use it.  */
446275970Scy  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
447275970Scy  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
448275970Scy		    QUEUED_INSN (x));
449275970Scy  return QUEUED_COPY (x);
450275970Scy}
451275970Scy
452275970Scy/* Return nonzero if X contains a QUEUED expression:
453275970Scy   if it contains anything that will be altered by a queued increment.
454275970Scy   We handle only combinations of MEM, PLUS, MINUS and MULT operators
455275970Scy   since memory addresses generally contain only those.  */
456275970Scy
457275970Scyint
458275970Scyqueued_subexp_p (rtx x)
459275970Scy{
460275970Scy  enum rtx_code code = GET_CODE (x);
461275970Scy  switch (code)
462275970Scy    {
463275970Scy    case QUEUED:
464275970Scy      return 1;
465275970Scy    case MEM:
466275970Scy      return queued_subexp_p (XEXP (x, 0));
467275970Scy    case MULT:
468275970Scy    case PLUS:
469275970Scy    case MINUS:
470275970Scy      return (queued_subexp_p (XEXP (x, 0))
471275970Scy	      || queued_subexp_p (XEXP (x, 1)));
472275970Scy    default:
473275970Scy      return 0;
474275970Scy    }
475285612Sdelphij}
476275970Scy
477275970Scy/* Retrieve a mark on the queue.  */
478275970Scy
479275970Scystatic rtx
480275970Scymark_queue (void)
481275970Scy{
482275970Scy  return pending_chain;
483275970Scy}
484275970Scy
485275970Scy/* Perform all the pending incrementations that have been enqueued
486275970Scy   after MARK was retrieved.  If MARK is null, perform all the
487275970Scy   pending incrementations.  */
488275970Scy
489275970Scystatic void
490275970Scyemit_insns_enqueued_after_mark (rtx mark)
491275970Scy{
492275970Scy  rtx p;
493275970Scy
494275970Scy  /* The marked incrementation may have been emitted in the meantime
495275970Scy     through a call to emit_queue.  In this case, the mark is not valid
496275970Scy     anymore so do nothing.  */
497275970Scy  if (mark && ! QUEUED_BODY (mark))
498275970Scy    return;
499275970Scy
500275970Scy  while ((p = pending_chain) != mark)
501275970Scy    {
502275970Scy      rtx body = QUEUED_BODY (p);
503275970Scy
504275970Scy      switch (GET_CODE (body))
505275970Scy	{
506275970Scy	case INSN:
507275970Scy	case JUMP_INSN:
508275970Scy	case CALL_INSN:
509275970Scy	case CODE_LABEL:
510275970Scy	case BARRIER:
511275970Scy	case NOTE:
512275970Scy	  QUEUED_INSN (p) = body;
513275970Scy	  emit_insn (body);
514275970Scy	  break;
515275970Scy
516275970Scy#ifdef ENABLE_CHECKING
517275970Scy	case SEQUENCE:
518275970Scy	  abort ();
519275970Scy	  break;
520275970Scy#endif
521275970Scy
522275970Scy	default:
523275970Scy	  QUEUED_INSN (p) = emit_insn (body);
524275970Scy	  break;
525275970Scy	}
526275970Scy
527275970Scy      QUEUED_BODY (p) = 0;
528275970Scy      pending_chain = QUEUED_NEXT (p);
529275970Scy    }
530275970Scy}
531275970Scy
532275970Scy/* Perform all the pending incrementations.  */
533275970Scy
534275970Scyvoid
535275970Scyemit_queue (void)
536275970Scy{
537275970Scy  emit_insns_enqueued_after_mark (NULL_RTX);
538275970Scy}
539275970Scy
540275970Scy/* Copy data from FROM to TO, where the machine modes are not the same.
541275970Scy   Both modes may be integer, or both may be floating.
542275970Scy   UNSIGNEDP should be nonzero if FROM is an unsigned type.
543275970Scy   This causes zero-extension instead of sign-extension.  */
544275970Scy
545275970Scyvoid
546275970Scyconvert_move (rtx to, rtx from, int unsignedp)
547275970Scy{
548275970Scy  enum machine_mode to_mode = GET_MODE (to);
549275970Scy  enum machine_mode from_mode = GET_MODE (from);
550275970Scy  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
551275970Scy  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
552275970Scy  enum insn_code code;
553275970Scy  rtx libcall;
554275970Scy
555275970Scy  /* rtx code for making an equivalent value.  */
556275970Scy  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
557275970Scy			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
558275970Scy
559275970Scy  to = protect_from_queue (to, 1);
560275970Scy  from = protect_from_queue (from, 0);
561275970Scy
562275970Scy  if (to_real != from_real)
563275970Scy    abort ();
564275970Scy
565275970Scy  /* If FROM is a SUBREG that indicates that we have already done at least
566275970Scy     the required extension, strip it.  We don't handle such SUBREGs as
567275970Scy     TO here.  */
568275970Scy
569275970Scy  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
570275970Scy      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
571275970Scy	  >= GET_MODE_SIZE (to_mode))
572275970Scy      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
573275970Scy    from = gen_lowpart (to_mode, from), from_mode = to_mode;
574275970Scy
575275970Scy  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
576275970Scy    abort ();
577275970Scy
578275970Scy  if (to_mode == from_mode
579275970Scy      || (from_mode == VOIDmode && CONSTANT_P (from)))
580275970Scy    {
581275970Scy      emit_move_insn (to, from);
582275970Scy      return;
583275970Scy    }
584275970Scy
585275970Scy  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
586275970Scy    {
587275970Scy      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
588275970Scy	abort ();
589275970Scy
590275970Scy      if (VECTOR_MODE_P (to_mode))
591275970Scy	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
592275970Scy      else
593275970Scy	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
594275970Scy
595275970Scy      emit_move_insn (to, from);
596275970Scy      return;
597275970Scy    }
598275970Scy
599275970Scy  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
600275970Scy    {
601275970Scy      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
602275970Scy      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
603275970Scy      return;
604285612Sdelphij    }
605289997Sglebius
606289997Sglebius  if (to_real)
607275970Scy    {
608285612Sdelphij      rtx value, insns;
609275970Scy      convert_optab tab;
610275970Scy
611275970Scy      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
612275970Scy	tab = sext_optab;
613275970Scy      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
614275970Scy	tab = trunc_optab;
615275970Scy      else
616289997Sglebius	abort ();
617289997Sglebius
618289997Sglebius      /* Try converting directly if the insn is supported.  */
619275970Scy
620275970Scy      code = tab->handlers[to_mode][from_mode].insn_code;
621275970Scy      if (code != CODE_FOR_nothing)
622275970Scy	{
623285612Sdelphij	  emit_unop_insn (code, to, from,
624275970Scy			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
625275970Scy	  return;
626275970Scy	}
627275970Scy
628275970Scy      /* Otherwise use a libcall.  */
629275970Scy      libcall = tab->handlers[to_mode][from_mode].libfunc;
630275970Scy
631275970Scy      if (!libcall)
632275970Scy	/* This conversion is not implemented yet.  */
633275970Scy	abort ();
634275970Scy
635275970Scy      start_sequence ();
636275970Scy      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
637285612Sdelphij				       1, from, from_mode);
638275970Scy      insns = get_insns ();
639275970Scy      end_sequence ();
640275970Scy      emit_libcall_block (insns, to, value,
641275970Scy			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
642275970Scy								       from)
643275970Scy			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
644285612Sdelphij      return;
645275970Scy    }
646275970Scy
647275970Scy  /* Handle pointer conversion.  */			/* SPEE 900220.  */
648275970Scy  /* Targets are expected to provide conversion insns between PxImode and
649275970Scy     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
650275970Scy  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
651275970Scy    {
652275970Scy      enum machine_mode full_mode
653285612Sdelphij	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
654275970Scy
655275970Scy      if (trunc_optab->handlers[to_mode][full_mode].insn_code
656275970Scy	  == CODE_FOR_nothing)
657275970Scy	abort ();
658275970Scy
659275970Scy      if (full_mode != from_mode)
660275970Scy	from = convert_to_mode (full_mode, from, unsignedp);
661275970Scy      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
662275970Scy		      to, from, UNKNOWN);
663275970Scy      return;
664275970Scy    }
665285612Sdelphij  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
666275970Scy    {
667275970Scy      enum machine_mode full_mode
668275970Scy	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
669275970Scy
670275970Scy      if (sext_optab->handlers[full_mode][from_mode].insn_code
671285612Sdelphij	  == CODE_FOR_nothing)
672275970Scy	abort ();
673275970Scy
674275970Scy      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
675275970Scy		      to, from, UNKNOWN);
676275970Scy      if (to_mode == full_mode)
677275970Scy	return;
678275970Scy
679275970Scy      /* else proceed to integer conversions below */
680275970Scy      from_mode = full_mode;
681275970Scy    }
682275970Scy
683275970Scy  /* Now both modes are integers.  */
684275970Scy
685275970Scy  /* Handle expanding beyond a word.  */
686275970Scy  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
687275970Scy      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
688275970Scy    {
689275970Scy      rtx insns;
690275970Scy      rtx lowpart;
691275970Scy      rtx fill_value;
692275970Scy      rtx lowfrom;
693275970Scy      int i;
694275970Scy      enum machine_mode lowpart_mode;
695275970Scy      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
696275970Scy
697275970Scy      /* Try converting directly if the insn is supported.  */
698275970Scy      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
699275970Scy	  != CODE_FOR_nothing)
700275970Scy	{
701275970Scy	  /* If FROM is a SUBREG, put it into a register.  Do this
702275970Scy	     so that we always generate the same set of insns for
703275970Scy	     better cse'ing; if an intermediate assignment occurred,
704275970Scy	     we won't be doing the operation directly on the SUBREG.  */
705275970Scy	  if (optimize > 0 && GET_CODE (from) == SUBREG)
706275970Scy	    from = force_reg (from_mode, from);
707275970Scy	  emit_unop_insn (code, to, from, equiv_code);
708275970Scy	  return;
709275970Scy	}
710275970Scy      /* Next, try converting via full word.  */
711275970Scy      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
712275970Scy	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
713275970Scy		   != CODE_FOR_nothing))
714275970Scy	{
715275970Scy	  if (GET_CODE (to) == REG)
716275970Scy	    {
717275970Scy	      if (reg_overlap_mentioned_p (to, from))
718275970Scy		from = force_reg (from_mode, from);
719275970Scy	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
720275970Scy	    }
721275970Scy	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
722275970Scy	  emit_unop_insn (code, to,
723275970Scy			  gen_lowpart (word_mode, to), equiv_code);
724275970Scy	  return;
725275970Scy	}
726275970Scy
727275970Scy      /* No special multiword conversion insn; do it by hand.  */
728275970Scy      start_sequence ();
729275970Scy
730275970Scy      /* Since we will turn this into a no conflict block, we must ensure
731275970Scy	 that the source does not overlap the target.  */
732275970Scy
733275970Scy      if (reg_overlap_mentioned_p (to, from))
734275970Scy	from = force_reg (from_mode, from);
735275970Scy
736275970Scy      /* Get a copy of FROM widened to a word, if necessary.  */
737275970Scy      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
738275970Scy	lowpart_mode = word_mode;
739275970Scy      else
740275970Scy	lowpart_mode = from_mode;
741275970Scy
742275970Scy      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
743275970Scy
744275970Scy      lowpart = gen_lowpart (lowpart_mode, to);
745275970Scy      emit_move_insn (lowpart, lowfrom);
746275970Scy
747275970Scy      /* Compute the value to put in each remaining word.  */
748275970Scy      if (unsignedp)
749275970Scy	fill_value = const0_rtx;
750275970Scy      else
751275970Scy	{
752275970Scy#ifdef HAVE_slt
753275970Scy	  if (HAVE_slt
754275970Scy	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
755275970Scy	      && STORE_FLAG_VALUE == -1)
756285612Sdelphij	    {
757275970Scy	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
758285612Sdelphij			     lowpart_mode, 0);
759275970Scy	      fill_value = gen_reg_rtx (word_mode);
760285612Sdelphij	      emit_insn (gen_slt (fill_value));
761275970Scy	    }
762275970Scy	  else
763275970Scy#endif
764275970Scy	    {
765275970Scy	      fill_value
766275970Scy		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
767275970Scy				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
768275970Scy				NULL_RTX, 0);
769275970Scy	      fill_value = convert_to_mode (word_mode, fill_value, 1);
770275970Scy	    }
771275970Scy	}
772275970Scy
773275970Scy      /* Fill the remaining words.  */
774275970Scy      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
775275970Scy	{
776275970Scy	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
777275970Scy	  rtx subword = operand_subword (to, index, 1, to_mode);
778275970Scy
779275970Scy	  if (subword == 0)
780275970Scy	    abort ();
781275970Scy
782275970Scy	  if (fill_value != subword)
783275970Scy	    emit_move_insn (subword, fill_value);
784275970Scy	}
785275970Scy
786275970Scy      insns = get_insns ();
787285612Sdelphij      end_sequence ();
788275970Scy
789275970Scy      emit_no_conflict_block (insns, to, from, NULL_RTX,
790275970Scy			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
791275970Scy      return;
792275970Scy    }
793275970Scy
794275970Scy  /* Truncating multi-word to a word or less.  */
795275970Scy  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
796275970Scy      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
797275970Scy    {
798275970Scy      if (!((GET_CODE (from) == MEM
799275970Scy	     && ! MEM_VOLATILE_P (from)
800275970Scy	     && direct_load[(int) to_mode]
801275970Scy	     && ! mode_dependent_address_p (XEXP (from, 0)))
802275970Scy	    || GET_CODE (from) == REG
803275970Scy	    || GET_CODE (from) == SUBREG))
804275970Scy	from = force_reg (from_mode, from);
805275970Scy      convert_move (to, gen_lowpart (word_mode, from), 0);
806275970Scy      return;
807275970Scy    }
808275970Scy
809275970Scy  /* Now follow all the conversions between integers
810275970Scy     no more than a word long.  */
811275970Scy
812275970Scy  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
813275970Scy  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
814275970Scy      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
815275970Scy				GET_MODE_BITSIZE (from_mode)))
816275970Scy    {
817275970Scy      if (!((GET_CODE (from) == MEM
818275970Scy	     && ! MEM_VOLATILE_P (from)
819275970Scy	     && direct_load[(int) to_mode]
820275970Scy	     && ! mode_dependent_address_p (XEXP (from, 0)))
821275970Scy	    || GET_CODE (from) == REG
822275970Scy	    || GET_CODE (from) == SUBREG))
823275970Scy	from = force_reg (from_mode, from);
824275970Scy      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
825275970Scy	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
826275970Scy	from = copy_to_reg (from);
827275970Scy      emit_move_insn (to, gen_lowpart (to_mode, from));
828275970Scy      return;
829275970Scy    }
830275970Scy
831275970Scy  /* Handle extension.  */
832275970Scy  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
833275970Scy    {
834275970Scy      /* Convert directly if that works.  */
835275970Scy      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
836275970Scy	  != CODE_FOR_nothing)
837275970Scy	{
838275970Scy	  if (flag_force_mem)
839275970Scy	    from = force_not_mem (from);
840275970Scy
841275970Scy	  emit_unop_insn (code, to, from, equiv_code);
842275970Scy	  return;
843275970Scy	}
844275970Scy      else
845275970Scy	{
846275970Scy	  enum machine_mode intermediate;
847275970Scy	  rtx tmp;
848275970Scy	  tree shift_amount;
849275970Scy
850275970Scy	  /* Search for a mode to convert via.  */
851275970Scy	  for (intermediate = from_mode; intermediate != VOIDmode;
852275970Scy	       intermediate = GET_MODE_WIDER_MODE (intermediate))
853275970Scy	    if (((can_extend_p (to_mode, intermediate, unsignedp)
854275970Scy		  != CODE_FOR_nothing)
855275970Scy		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
856275970Scy		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
857275970Scy					       GET_MODE_BITSIZE (intermediate))))
858275970Scy		&& (can_extend_p (intermediate, from_mode, unsignedp)
859275970Scy		    != CODE_FOR_nothing))
860275970Scy	      {
861275970Scy		convert_move (to, convert_to_mode (intermediate, from,
862275970Scy						   unsignedp), unsignedp);
863275970Scy		return;
864275970Scy	      }
865275970Scy
866275970Scy	  /* No suitable intermediate mode.
867275970Scy	     Generate what we need with	shifts.  */
868275970Scy	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
869275970Scy				      - GET_MODE_BITSIZE (from_mode), 0);
870275970Scy	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
871275970Scy	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
872275970Scy			      to, unsignedp);
873275970Scy	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
874275970Scy			      to, unsignedp);
875275970Scy	  if (tmp != to)
876275970Scy	    emit_move_insn (to, tmp);
877275970Scy	  return;
878275970Scy	}
879275970Scy    }
880275970Scy
881275970Scy  /* Support special truncate insns for certain modes.  */
882275970Scy  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
883275970Scy    {
884275970Scy      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
885275970Scy		      to, from, UNKNOWN);
886275970Scy      return;
887275970Scy    }
888275970Scy
889275970Scy  /* Handle truncation of volatile memrefs, and so on;
890275970Scy     the things that couldn't be truncated directly,
891275970Scy     and for which there was no special instruction.
892275970Scy
893275970Scy     ??? Code above formerly short-circuited this, for most integer
894275970Scy     mode pairs, with a force_reg in from_mode followed by a recursive
895275970Scy     call to this routine.  Appears always to have been wrong.  */
896275970Scy  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
897275970Scy    {
898285612Sdelphij      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
899275970Scy      emit_move_insn (to, temp);
900275970Scy      return;
901275970Scy    }
902275970Scy
903275970Scy  /* Mode combination is not recognized.  */
904275970Scy  abort ();
905275970Scy}
906275970Scy
907275970Scy/* Return an rtx for a value that would result
908275970Scy   from converting X to mode MODE.
909275970Scy   Both X and MODE may be floating, or both integer.
910275970Scy   UNSIGNEDP is nonzero if X is an unsigned value.
911275970Scy   This can be done by referring to a part of X in place
912275970Scy   or by copying to a new temporary with conversion.
913275970Scy
914275970Scy   This function *must not* call protect_from_queue
915275970Scy   except when putting X into an insn (in which case convert_move does it).  */
916275970Scy
917275970Scyrtx
918275970Scyconvert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
919275970Scy{
920275970Scy  return convert_modes (mode, VOIDmode, x, unsignedp);
921275970Scy}
922275970Scy
923275970Scy/* Return an rtx for a value that would result
924275970Scy   from converting X from mode OLDMODE to mode MODE.
925275970Scy   Both modes may be floating, or both integer.
926275970Scy   UNSIGNEDP is nonzero if X is an unsigned value.
927275970Scy
928275970Scy   This can be done by referring to a part of X in place
929275970Scy   or by copying to a new temporary with conversion.
930285612Sdelphij
931285612Sdelphij   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
932285612Sdelphij
933285612Sdelphij   This function *must not* call protect_from_queue
934285612Sdelphij   except when putting X into an insn (in which case convert_move does it).  */
935275970Scy
936275970Scyrtx
937275970Scyconvert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
938275970Scy{
939275970Scy  rtx temp;
940275970Scy
941275970Scy  /* If FROM is a SUBREG that indicates that we have already done at least
942275970Scy     the required extension, strip it.  */
943275970Scy
944275970Scy  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
945275970Scy      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
946275970Scy      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
947275970Scy    x = gen_lowpart (mode, x);
948275970Scy
949275970Scy  if (GET_MODE (x) != VOIDmode)
950275970Scy    oldmode = GET_MODE (x);
951275970Scy
952275970Scy  if (mode == oldmode)
953275970Scy    return x;
954275970Scy
955275970Scy  /* There is one case that we must handle specially: If we are converting
956275970Scy     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
957275970Scy     we are to interpret the constant as unsigned, gen_lowpart will do
958275970Scy     the wrong if the constant appears negative.  What we want to do is
959275970Scy     make the high-order word of the constant zero, not all ones.  */
960275970Scy
961275970Scy  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
962275970Scy      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
963275970Scy      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
964275970Scy    {
965275970Scy      HOST_WIDE_INT val = INTVAL (x);
966275970Scy
967275970Scy      if (oldmode != VOIDmode
968275970Scy	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
969275970Scy	{
970275970Scy	  int width = GET_MODE_BITSIZE (oldmode);
971275970Scy
972275970Scy	  /* We need to zero extend VAL.  */
973275970Scy	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
974275970Scy	}
975275970Scy
976275970Scy      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
977275970Scy    }
978275970Scy
979275970Scy  /* We can do this with a gen_lowpart if both desired and current modes
980280849Scy     are integer, and this is either a constant integer, a register, or a
981275970Scy     non-volatile MEM.  Except for the constant case where MODE is no
982316722Sdelphij     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
983275970Scy
984275970Scy  if ((GET_CODE (x) == CONST_INT
985275970Scy       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
986275970Scy      || (GET_MODE_CLASS (mode) == MODE_INT
987275970Scy	  && GET_MODE_CLASS (oldmode) == MODE_INT
988275970Scy	  && (GET_CODE (x) == CONST_DOUBLE
989275970Scy	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
990275970Scy		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
991275970Scy		       && direct_load[(int) mode])
992275970Scy		      || (GET_CODE (x) == REG
993275970Scy			  && (! HARD_REGISTER_P (x)
994275970Scy			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
995275970Scy			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
996275970Scy						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
997275970Scy    {
998275970Scy      /* ?? If we don't know OLDMODE, we have to assume here that
999275970Scy	 X does not need sign- or zero-extension.   This may not be
1000275970Scy	 the case, but it's the best we can do.  */
1001275970Scy      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1002275970Scy	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1003275970Scy	{
1004275970Scy	  HOST_WIDE_INT val = INTVAL (x);
1005275970Scy	  int width = GET_MODE_BITSIZE (oldmode);
1006
1007	  /* We must sign or zero-extend in this case.  Start by
1008	     zero-extending, then sign extend if we need to.  */
1009	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1010	  if (! unsignedp
1011	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1012	    val |= (HOST_WIDE_INT) (-1) << width;
1013
1014	  return gen_int_mode (val, mode);
1015	}
1016
1017      return gen_lowpart (mode, x);
1018    }
1019
1020  /* Converting from integer constant into mode is always equivalent to an
1021     subreg operation.  */
1022  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1023    {
1024      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1025	abort ();
1026      return simplify_gen_subreg (mode, x, oldmode, 0);
1027    }
1028
1029  temp = gen_reg_rtx (mode);
1030  convert_move (temp, x, unsignedp);
1031  return temp;
1032}
1033
1034/* STORE_MAX_PIECES is the number of bytes at a time that we can
1035   store efficiently.  Due to internal GCC limitations, this is
1036   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1037   for an immediate constant.  */
1038
1039#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1040
1041/* Determine whether the LEN bytes can be moved by using several move
1042   instructions.  Return nonzero if a call to move_by_pieces should
1043   succeed.  */
1044
1045int
1046can_move_by_pieces (unsigned HOST_WIDE_INT len,
1047		    unsigned int align ATTRIBUTE_UNUSED)
1048{
1049  return MOVE_BY_PIECES_P (len, align);
1050}
1051
1052/* Generate several move instructions to copy LEN bytes from block FROM to
1053   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1054   and TO through protect_from_queue before calling.
1055
1056   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1057   used to push FROM to the stack.
1058
1059   ALIGN is maximum stack alignment we can assume.
1060
1061   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1062   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1063   stpcpy.  */
1064
1065rtx
1066move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1067		unsigned int align, int endp)
1068{
1069  struct move_by_pieces data;
1070  rtx to_addr, from_addr = XEXP (from, 0);
1071  unsigned int max_size = MOVE_MAX_PIECES + 1;
1072  enum machine_mode mode = VOIDmode, tmode;
1073  enum insn_code icode;
1074
1075  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1076
1077  data.offset = 0;
1078  data.from_addr = from_addr;
1079  if (to)
1080    {
1081      to_addr = XEXP (to, 0);
1082      data.to = to;
1083      data.autinc_to
1084	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1085	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1086      data.reverse
1087	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1088    }
1089  else
1090    {
1091      to_addr = NULL_RTX;
1092      data.to = NULL_RTX;
1093      data.autinc_to = 1;
1094#ifdef STACK_GROWS_DOWNWARD
1095      data.reverse = 1;
1096#else
1097      data.reverse = 0;
1098#endif
1099    }
1100  data.to_addr = to_addr;
1101  data.from = from;
1102  data.autinc_from
1103    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1104       || GET_CODE (from_addr) == POST_INC
1105       || GET_CODE (from_addr) == POST_DEC);
1106
1107  data.explicit_inc_from = 0;
1108  data.explicit_inc_to = 0;
1109  if (data.reverse) data.offset = len;
1110  data.len = len;
1111
1112  /* If copying requires more than two move insns,
1113     copy addresses to registers (to make displacements shorter)
1114     and use post-increment if available.  */
1115  if (!(data.autinc_from && data.autinc_to)
1116      && move_by_pieces_ninsns (len, align) > 2)
1117    {
1118      /* Find the mode of the largest move...  */
1119      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1120	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1121	if (GET_MODE_SIZE (tmode) < max_size)
1122	  mode = tmode;
1123
1124      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1125	{
1126	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1127	  data.autinc_from = 1;
1128	  data.explicit_inc_from = -1;
1129	}
1130      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1131	{
1132	  data.from_addr = copy_addr_to_reg (from_addr);
1133	  data.autinc_from = 1;
1134	  data.explicit_inc_from = 1;
1135	}
1136      if (!data.autinc_from && CONSTANT_P (from_addr))
1137	data.from_addr = copy_addr_to_reg (from_addr);
1138      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1139	{
1140	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1141	  data.autinc_to = 1;
1142	  data.explicit_inc_to = -1;
1143	}
1144      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1145	{
1146	  data.to_addr = copy_addr_to_reg (to_addr);
1147	  data.autinc_to = 1;
1148	  data.explicit_inc_to = 1;
1149	}
1150      if (!data.autinc_to && CONSTANT_P (to_addr))
1151	data.to_addr = copy_addr_to_reg (to_addr);
1152    }
1153
1154  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1155      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1156    align = MOVE_MAX * BITS_PER_UNIT;
1157
1158  /* First move what we can in the largest integer mode, then go to
1159     successively smaller modes.  */
1160
1161  while (max_size > 1)
1162    {
1163      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1164	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1165	if (GET_MODE_SIZE (tmode) < max_size)
1166	  mode = tmode;
1167
1168      if (mode == VOIDmode)
1169	break;
1170
1171      icode = mov_optab->handlers[(int) mode].insn_code;
1172      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1173	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1174
1175      max_size = GET_MODE_SIZE (mode);
1176    }
1177
1178  /* The code above should have handled everything.  */
1179  if (data.len > 0)
1180    abort ();
1181
1182  if (endp)
1183    {
1184      rtx to1;
1185
1186      if (data.reverse)
1187	abort ();
1188      if (data.autinc_to)
1189	{
1190	  if (endp == 2)
1191	    {
1192	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1193		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1194	      else
1195		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1196								-1));
1197	    }
1198	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1199					   data.offset);
1200	}
1201      else
1202	{
1203	  if (endp == 2)
1204	    --data.offset;
1205	  to1 = adjust_address (data.to, QImode, data.offset);
1206	}
1207      return to1;
1208    }
1209  else
1210    return data.to;
1211}
1212
1213/* Return number of insns required to move L bytes by pieces.
1214   ALIGN (in bits) is maximum alignment we can assume.  */
1215
1216static unsigned HOST_WIDE_INT
1217move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1218{
1219  unsigned HOST_WIDE_INT n_insns = 0;
1220  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1221
1222  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1223      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1224    align = MOVE_MAX * BITS_PER_UNIT;
1225
1226  while (max_size > 1)
1227    {
1228      enum machine_mode mode = VOIDmode, tmode;
1229      enum insn_code icode;
1230
1231      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1232	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1233	if (GET_MODE_SIZE (tmode) < max_size)
1234	  mode = tmode;
1235
1236      if (mode == VOIDmode)
1237	break;
1238
1239      icode = mov_optab->handlers[(int) mode].insn_code;
1240      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1241	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1242
1243      max_size = GET_MODE_SIZE (mode);
1244    }
1245
1246  if (l)
1247    abort ();
1248  return n_insns;
1249}
1250
1251/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1252   with move instructions for mode MODE.  GENFUN is the gen_... function
1253   to make a move insn for that mode.  DATA has all the other info.  */
1254
1255static void
1256move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1257		  struct move_by_pieces *data)
1258{
1259  unsigned int size = GET_MODE_SIZE (mode);
1260  rtx to1 = NULL_RTX, from1;
1261
1262  while (data->len >= size)
1263    {
1264      if (data->reverse)
1265	data->offset -= size;
1266
1267      if (data->to)
1268	{
1269	  if (data->autinc_to)
1270	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1271					     data->offset);
1272	  else
1273	    to1 = adjust_address (data->to, mode, data->offset);
1274	}
1275
1276      if (data->autinc_from)
1277	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1278					   data->offset);
1279      else
1280	from1 = adjust_address (data->from, mode, data->offset);
1281
1282      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1283	emit_insn (gen_add2_insn (data->to_addr,
1284				  GEN_INT (-(HOST_WIDE_INT)size)));
1285      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1286	emit_insn (gen_add2_insn (data->from_addr,
1287				  GEN_INT (-(HOST_WIDE_INT)size)));
1288
1289      if (data->to)
1290	emit_insn ((*genfun) (to1, from1));
1291      else
1292	{
1293#ifdef PUSH_ROUNDING
1294	  emit_single_push_insn (mode, from1, NULL);
1295#else
1296	  abort ();
1297#endif
1298	}
1299
1300      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1301	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1302      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1303	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1304
1305      if (! data->reverse)
1306	data->offset += size;
1307
1308      data->len -= size;
1309    }
1310}
1311
1312/* Emit code to move a block Y to a block X.  This may be done with
1313   string-move instructions, with multiple scalar move instructions,
1314   or with a library call.
1315
1316   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1317   SIZE is an rtx that says how long they are.
1318   ALIGN is the maximum alignment we can assume they have.
1319   METHOD describes what kind of copy this is, and what mechanisms may be used.
1320
1321   Return the address of the new block, if memcpy is called and returns it,
1322   0 otherwise.  */
1323
1324rtx
1325emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1326{
1327  bool may_use_call;
1328  rtx retval = 0;
1329  unsigned int align;
1330
1331  switch (method)
1332    {
1333    case BLOCK_OP_NORMAL:
1334      may_use_call = true;
1335      break;
1336
1337    case BLOCK_OP_CALL_PARM:
1338      may_use_call = block_move_libcall_safe_for_call_parm ();
1339
1340      /* Make inhibit_defer_pop nonzero around the library call
1341	 to force it to pop the arguments right away.  */
1342      NO_DEFER_POP;
1343      break;
1344
1345    case BLOCK_OP_NO_LIBCALL:
1346      may_use_call = false;
1347      break;
1348
1349    default:
1350      abort ();
1351    }
1352
1353  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1354
1355  if (GET_MODE (x) != BLKmode)
1356    abort ();
1357  if (GET_MODE (y) != BLKmode)
1358    abort ();
1359
1360  x = protect_from_queue (x, 1);
1361  y = protect_from_queue (y, 0);
1362  size = protect_from_queue (size, 0);
1363
1364  if (GET_CODE (x) != MEM)
1365    abort ();
1366  if (GET_CODE (y) != MEM)
1367    abort ();
1368  if (size == 0)
1369    abort ();
1370
1371  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1372     can be incorrect is coming from __builtin_memcpy.  */
1373  if (GET_CODE (size) == CONST_INT)
1374    {
1375      if (INTVAL (size) == 0)
1376	return 0;
1377
1378      x = shallow_copy_rtx (x);
1379      y = shallow_copy_rtx (y);
1380      set_mem_size (x, size);
1381      set_mem_size (y, size);
1382    }
1383
1384  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1385    move_by_pieces (x, y, INTVAL (size), align, 0);
1386  else if (emit_block_move_via_movstr (x, y, size, align))
1387    ;
1388  else if (may_use_call)
1389    retval = emit_block_move_via_libcall (x, y, size);
1390  else
1391    emit_block_move_via_loop (x, y, size, align);
1392
1393  if (method == BLOCK_OP_CALL_PARM)
1394    OK_DEFER_POP;
1395
1396  return retval;
1397}
1398
1399/* A subroutine of emit_block_move.  Returns true if calling the
1400   block move libcall will not clobber any parameters which may have
1401   already been placed on the stack.  */
1402
1403static bool
1404block_move_libcall_safe_for_call_parm (void)
1405{
1406  /* If arguments are pushed on the stack, then they're safe.  */
1407  if (PUSH_ARGS)
1408    return true;
1409
1410  /* If registers go on the stack anyway, any argument is sure to clobber
1411     an outgoing argument.  */
1412#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1413  {
1414    tree fn = emit_block_move_libcall_fn (false);
1415    (void) fn;
1416    if (REG_PARM_STACK_SPACE (fn) != 0)
1417      return false;
1418  }
1419#endif
1420
1421  /* If any argument goes in memory, then it might clobber an outgoing
1422     argument.  */
1423  {
1424    CUMULATIVE_ARGS args_so_far;
1425    tree fn, arg;
1426
1427    fn = emit_block_move_libcall_fn (false);
1428    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1429
1430    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1431    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1432      {
1433	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1434	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1435	if (!tmp || !REG_P (tmp))
1436	  return false;
1437#ifdef FUNCTION_ARG_PARTIAL_NREGS
1438	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1439					NULL_TREE, 1))
1440	  return false;
1441#endif
1442	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1443      }
1444  }
1445  return true;
1446}
1447
1448/* A subroutine of emit_block_move.  Expand a movstr pattern;
1449   return true if successful.  */
1450
1451static bool
1452emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1453{
1454  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1455  enum machine_mode mode;
1456
1457  /* Since this is a move insn, we don't care about volatility.  */
1458  volatile_ok = 1;
1459
1460  /* Try the most limited insn first, because there's no point
1461     including more than one in the machine description unless
1462     the more limited one has some advantage.  */
1463
1464  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1465       mode = GET_MODE_WIDER_MODE (mode))
1466    {
1467      enum insn_code code = movstr_optab[(int) mode];
1468      insn_operand_predicate_fn pred;
1469
1470      if (code != CODE_FOR_nothing
1471	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1472	     here because if SIZE is less than the mode mask, as it is
1473	     returned by the macro, it will definitely be less than the
1474	     actual mode mask.  */
1475	  && ((GET_CODE (size) == CONST_INT
1476	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1477		   <= (GET_MODE_MASK (mode) >> 1)))
1478	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1479	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1480	      || (*pred) (x, BLKmode))
1481	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1482	      || (*pred) (y, BLKmode))
1483	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1484	      || (*pred) (opalign, VOIDmode)))
1485	{
1486	  rtx op2;
1487	  rtx last = get_last_insn ();
1488	  rtx pat;
1489
1490	  op2 = convert_to_mode (mode, size, 1);
1491	  pred = insn_data[(int) code].operand[2].predicate;
1492	  if (pred != 0 && ! (*pred) (op2, mode))
1493	    op2 = copy_to_mode_reg (mode, op2);
1494
1495	  /* ??? When called via emit_block_move_for_call, it'd be
1496	     nice if there were some way to inform the backend, so
1497	     that it doesn't fail the expansion because it thinks
1498	     emitting the libcall would be more efficient.  */
1499
1500	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1501	  if (pat)
1502	    {
1503	      emit_insn (pat);
1504	      volatile_ok = 0;
1505	      return true;
1506	    }
1507	  else
1508	    delete_insns_since (last);
1509	}
1510    }
1511
1512  volatile_ok = 0;
1513  return false;
1514}
1515
1516/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
1517   Return the return value from memcpy, 0 otherwise.  */
1518
1519static rtx
1520emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1521{
1522  rtx dst_addr, src_addr;
1523  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1524  enum machine_mode size_mode;
1525  rtx retval;
1526
1527  /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1528
1529     It is unsafe to save the value generated by protect_from_queue and reuse
1530     it later.  Consider what happens if emit_queue is called before the
1531     return value from protect_from_queue is used.
1532
1533     Expansion of the CALL_EXPR below will call emit_queue before we are
1534     finished emitting RTL for argument setup.  So if we are not careful we
1535     could get the wrong value for an argument.
1536
1537     To avoid this problem we go ahead and emit code to copy the addresses of
1538     DST and SRC and SIZE into new pseudos.  We can then place those new
1539     pseudos into an RTL_EXPR and use them later, even after a call to
1540     emit_queue.
1541
1542     Note this is not strictly needed for library calls since they do not call
1543     emit_queue before loading their arguments.  However, we may need to have
1544     library calls call emit_queue in the future since failing to do so could
1545     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1546     arguments in registers.  */
1547
1548  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1549  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1550
1551  dst_addr = convert_memory_address (ptr_mode, dst_addr);
1552  src_addr = convert_memory_address (ptr_mode, src_addr);
1553
1554  dst_tree = make_tree (ptr_type_node, dst_addr);
1555  src_tree = make_tree (ptr_type_node, src_addr);
1556
1557  if (TARGET_MEM_FUNCTIONS)
1558    size_mode = TYPE_MODE (sizetype);
1559  else
1560    size_mode = TYPE_MODE (unsigned_type_node);
1561
1562  size = convert_to_mode (size_mode, size, 1);
1563  size = copy_to_mode_reg (size_mode, size);
1564
1565  /* It is incorrect to use the libcall calling conventions to call
1566     memcpy in this context.  This could be a user call to memcpy and
1567     the user may wish to examine the return value from memcpy.  For
1568     targets where libcalls and normal calls have different conventions
1569     for returning pointers, we could end up generating incorrect code.
1570
1571     For convenience, we generate the call to bcopy this way as well.  */
1572
1573  if (TARGET_MEM_FUNCTIONS)
1574    size_tree = make_tree (sizetype, size);
1575  else
1576    size_tree = make_tree (unsigned_type_node, size);
1577
1578  fn = emit_block_move_libcall_fn (true);
1579  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1580  if (TARGET_MEM_FUNCTIONS)
1581    {
1582      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1583      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1584    }
1585  else
1586    {
1587      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1588      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1589    }
1590
1591  /* Now we have to build up the CALL_EXPR itself.  */
1592  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1593  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1594		     call_expr, arg_list, NULL_TREE);
1595
1596  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1597
1598  /* If we are initializing a readonly value, show the above call clobbered
1599     it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1600     the delay slot scheduler might overlook conflicts and take nasty
1601     decisions.  */
1602  if (RTX_UNCHANGING_P (dst))
1603    add_function_usage_to
1604      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1605					     gen_rtx_CLOBBER (VOIDmode, dst),
1606					     NULL_RTX));
1607
1608  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1609}
1610
1611/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1612   for the function we use for block copies.  The first time FOR_CALL
1613   is true, we call assemble_external.  */
1614
1615static GTY(()) tree block_move_fn;
1616
1617void
1618init_block_move_fn (const char *asmspec)
1619{
1620  if (!block_move_fn)
1621    {
1622      tree args, fn;
1623
1624      if (TARGET_MEM_FUNCTIONS)
1625	{
1626	  fn = get_identifier ("memcpy");
1627	  args = build_function_type_list (ptr_type_node, ptr_type_node,
1628					   const_ptr_type_node, sizetype,
1629					   NULL_TREE);
1630	}
1631      else
1632	{
1633	  fn = get_identifier ("bcopy");
1634	  args = build_function_type_list (void_type_node, const_ptr_type_node,
1635					   ptr_type_node, unsigned_type_node,
1636					   NULL_TREE);
1637	}
1638
1639      fn = build_decl (FUNCTION_DECL, fn, args);
1640      DECL_EXTERNAL (fn) = 1;
1641      TREE_PUBLIC (fn) = 1;
1642      DECL_ARTIFICIAL (fn) = 1;
1643      TREE_NOTHROW (fn) = 1;
1644
1645      block_move_fn = fn;
1646    }
1647
1648  if (asmspec)
1649    {
1650      SET_DECL_RTL (block_move_fn, NULL_RTX);
1651      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1652    }
1653}
1654
1655static tree
1656emit_block_move_libcall_fn (int for_call)
1657{
1658  static bool emitted_extern;
1659
1660  if (!block_move_fn)
1661    init_block_move_fn (NULL);
1662
1663  if (for_call && !emitted_extern)
1664    {
1665      emitted_extern = true;
1666      make_decl_rtl (block_move_fn, NULL);
1667      assemble_external (block_move_fn);
1668    }
1669
1670  return block_move_fn;
1671}
1672
1673/* A subroutine of emit_block_move.  Copy the data via an explicit
1674   loop.  This is used only when libcalls are forbidden.  */
1675/* ??? It'd be nice to copy in hunks larger than QImode.  */
1676
1677static void
1678emit_block_move_via_loop (rtx x, rtx y, rtx size,
1679			  unsigned int align ATTRIBUTE_UNUSED)
1680{
1681  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1682  enum machine_mode iter_mode;
1683
1684  iter_mode = GET_MODE (size);
1685  if (iter_mode == VOIDmode)
1686    iter_mode = word_mode;
1687
1688  top_label = gen_label_rtx ();
1689  cmp_label = gen_label_rtx ();
1690  iter = gen_reg_rtx (iter_mode);
1691
1692  emit_move_insn (iter, const0_rtx);
1693
1694  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1695  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1696  do_pending_stack_adjust ();
1697
1698  emit_note (NOTE_INSN_LOOP_BEG);
1699
1700  emit_jump (cmp_label);
1701  emit_label (top_label);
1702
1703  tmp = convert_modes (Pmode, iter_mode, iter, true);
1704  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1705  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1706  x = change_address (x, QImode, x_addr);
1707  y = change_address (y, QImode, y_addr);
1708
1709  emit_move_insn (x, y);
1710
1711  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1712			     true, OPTAB_LIB_WIDEN);
1713  if (tmp != iter)
1714    emit_move_insn (iter, tmp);
1715
1716  emit_note (NOTE_INSN_LOOP_CONT);
1717  emit_label (cmp_label);
1718
1719  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1720			   true, top_label);
1721
1722  emit_note (NOTE_INSN_LOOP_END);
1723}
1724
1725/* Copy all or part of a value X into registers starting at REGNO.
1726   The number of registers to be filled is NREGS.  */
1727
1728void
1729move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1730{
1731  int i;
1732#ifdef HAVE_load_multiple
1733  rtx pat;
1734  rtx last;
1735#endif
1736
1737  if (nregs == 0)
1738    return;
1739
1740  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1741    x = validize_mem (force_const_mem (mode, x));
1742
1743  /* See if the machine can do this with a load multiple insn.  */
1744#ifdef HAVE_load_multiple
1745  if (HAVE_load_multiple)
1746    {
1747      last = get_last_insn ();
1748      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1749			       GEN_INT (nregs));
1750      if (pat)
1751	{
1752	  emit_insn (pat);
1753	  return;
1754	}
1755      else
1756	delete_insns_since (last);
1757    }
1758#endif
1759
1760  for (i = 0; i < nregs; i++)
1761    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1762		    operand_subword_force (x, i, mode));
1763}
1764
1765/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1766   The number of registers to be filled is NREGS.  */
1767
1768void
1769move_block_from_reg (int regno, rtx x, int nregs)
1770{
1771  int i;
1772
1773  if (nregs == 0)
1774    return;
1775
1776  /* See if the machine can do this with a store multiple insn.  */
1777#ifdef HAVE_store_multiple
1778  if (HAVE_store_multiple)
1779    {
1780      rtx last = get_last_insn ();
1781      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1782				    GEN_INT (nregs));
1783      if (pat)
1784	{
1785	  emit_insn (pat);
1786	  return;
1787	}
1788      else
1789	delete_insns_since (last);
1790    }
1791#endif
1792
1793  for (i = 0; i < nregs; i++)
1794    {
1795      rtx tem = operand_subword (x, i, 1, BLKmode);
1796
1797      if (tem == 0)
1798	abort ();
1799
1800      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1801    }
1802}
1803
1804/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1805   ORIG, where ORIG is a non-consecutive group of registers represented by
1806   a PARALLEL.  The clone is identical to the original except in that the
1807   original set of registers is replaced by a new set of pseudo registers.
1808   The new set has the same modes as the original set.  */
1809
1810rtx
1811gen_group_rtx (rtx orig)
1812{
1813  int i, length;
1814  rtx *tmps;
1815
1816  if (GET_CODE (orig) != PARALLEL)
1817    abort ();
1818
1819  length = XVECLEN (orig, 0);
1820  tmps = alloca (sizeof (rtx) * length);
1821
1822  /* Skip a NULL entry in first slot.  */
1823  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1824
1825  if (i)
1826    tmps[0] = 0;
1827
1828  for (; i < length; i++)
1829    {
1830      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1831      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1832
1833      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1834    }
1835
1836  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1837}
1838
1839/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1840   where DST is non-consecutive registers represented by a PARALLEL.
1841   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1842   if not known.  */
1843
1844void
1845emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1846{
1847  rtx *tmps, src;
1848  int start, i;
1849
1850  if (GET_CODE (dst) != PARALLEL)
1851    abort ();
1852
1853  /* Check for a NULL entry, used to indicate that the parameter goes
1854     both on the stack and in registers.  */
1855  if (XEXP (XVECEXP (dst, 0, 0), 0))
1856    start = 0;
1857  else
1858    start = 1;
1859
1860  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1861
1862  /* Process the pieces.  */
1863  for (i = start; i < XVECLEN (dst, 0); i++)
1864    {
1865      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1866      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1867      unsigned int bytelen = GET_MODE_SIZE (mode);
1868      int shift = 0;
1869
1870      /* Handle trailing fragments that run over the size of the struct.  */
1871      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1872	{
1873	  /* Arrange to shift the fragment to where it belongs.
1874	     extract_bit_field loads to the lsb of the reg.  */
1875	  if (
1876#ifdef BLOCK_REG_PADDING
1877	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1878	      == (BYTES_BIG_ENDIAN ? upward : downward)
1879#else
1880	      BYTES_BIG_ENDIAN
1881#endif
1882	      )
1883	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1884	  bytelen = ssize - bytepos;
1885	  if (bytelen <= 0)
1886	    abort ();
1887	}
1888
1889      /* If we won't be loading directly from memory, protect the real source
1890	 from strange tricks we might play; but make sure that the source can
1891	 be loaded directly into the destination.  */
1892      src = orig_src;
1893      if (GET_CODE (orig_src) != MEM
1894	  && (!CONSTANT_P (orig_src)
1895	      || (GET_MODE (orig_src) != mode
1896		  && GET_MODE (orig_src) != VOIDmode)))
1897	{
1898	  if (GET_MODE (orig_src) == VOIDmode)
1899	    src = gen_reg_rtx (mode);
1900	  else
1901	    src = gen_reg_rtx (GET_MODE (orig_src));
1902
1903	  emit_move_insn (src, orig_src);
1904	}
1905
1906      /* Optimize the access just a bit.  */
1907      if (GET_CODE (src) == MEM
1908	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1909	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1910	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1911	  && bytelen == GET_MODE_SIZE (mode))
1912	{
1913	  tmps[i] = gen_reg_rtx (mode);
1914	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1915	}
1916      else if (GET_CODE (src) == CONCAT)
1917	{
1918	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1919	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1920
1921	  if ((bytepos == 0 && bytelen == slen0)
1922	      || (bytepos != 0 && bytepos + bytelen <= slen))
1923	    {
1924	      /* The following assumes that the concatenated objects all
1925		 have the same size.  In this case, a simple calculation
1926		 can be used to determine the object and the bit field
1927		 to be extracted.  */
1928	      tmps[i] = XEXP (src, bytepos / slen0);
1929	      if (! CONSTANT_P (tmps[i])
1930		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1931		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1932					     (bytepos % slen0) * BITS_PER_UNIT,
1933					     1, NULL_RTX, mode, mode, ssize);
1934	    }
1935	  else if (bytepos == 0)
1936	    {
1937	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1938	      emit_move_insn (mem, src);
1939	      tmps[i] = adjust_address (mem, mode, 0);
1940	    }
1941	  else
1942	    abort ();
1943	}
1944      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1945	 SIMD register, which is currently broken.  While we get GCC
1946	 to emit proper RTL for these cases, let's dump to memory.  */
1947      else if (VECTOR_MODE_P (GET_MODE (dst))
1948	       && GET_CODE (src) == REG)
1949	{
1950	  int slen = GET_MODE_SIZE (GET_MODE (src));
1951	  rtx mem;
1952
1953	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
1954	  emit_move_insn (mem, src);
1955	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1956	}
1957      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1958	       && XVECLEN (dst, 0) > 1)
1959	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1960      else if (CONSTANT_P (src)
1961	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1962	tmps[i] = src;
1963      else
1964	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1965				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1966				     mode, mode, ssize);
1967
1968      if (shift)
1969	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1970		      tmps[i], 0, OPTAB_WIDEN);
1971    }
1972
1973  emit_queue ();
1974
1975  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1976  for (i = start; i < XVECLEN (dst, 0); i++)
1977    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1978}
1979
1980/* Emit code to move a block SRC to block DST, where SRC and DST are
1981   non-consecutive groups of registers, each represented by a PARALLEL.  */
1982
1983void
1984emit_group_move (rtx dst, rtx src)
1985{
1986  int i;
1987
1988  if (GET_CODE (src) != PARALLEL
1989      || GET_CODE (dst) != PARALLEL
1990      || XVECLEN (src, 0) != XVECLEN (dst, 0))
1991    abort ();
1992
1993  /* Skip first entry if NULL.  */
1994  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1995    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1996		    XEXP (XVECEXP (src, 0, i), 0));
1997}
1998
1999/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2000   where SRC is non-consecutive registers represented by a PARALLEL.
2001   SSIZE represents the total size of block ORIG_DST, or -1 if not
2002   known.  */
2003
2004void
2005emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2006{
2007  rtx *tmps, dst;
2008  int start, i;
2009
2010  if (GET_CODE (src) != PARALLEL)
2011    abort ();
2012
2013  /* Check for a NULL entry, used to indicate that the parameter goes
2014     both on the stack and in registers.  */
2015  if (XEXP (XVECEXP (src, 0, 0), 0))
2016    start = 0;
2017  else
2018    start = 1;
2019
2020  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2021
2022  /* Copy the (probable) hard regs into pseudos.  */
2023  for (i = start; i < XVECLEN (src, 0); i++)
2024    {
2025      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2026      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2027      emit_move_insn (tmps[i], reg);
2028    }
2029  emit_queue ();
2030
2031  /* If we won't be storing directly into memory, protect the real destination
2032     from strange tricks we might play.  */
2033  dst = orig_dst;
2034  if (GET_CODE (dst) == PARALLEL)
2035    {
2036      rtx temp;
2037
2038      /* We can get a PARALLEL dst if there is a conditional expression in
2039	 a return statement.  In that case, the dst and src are the same,
2040	 so no action is necessary.  */
2041      if (rtx_equal_p (dst, src))
2042	return;
2043
2044      /* It is unclear if we can ever reach here, but we may as well handle
2045	 it.  Allocate a temporary, and split this into a store/load to/from
2046	 the temporary.  */
2047
2048      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2049      emit_group_store (temp, src, type, ssize);
2050      emit_group_load (dst, temp, type, ssize);
2051      return;
2052    }
2053  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2054    {
2055      dst = gen_reg_rtx (GET_MODE (orig_dst));
2056      /* Make life a bit easier for combine.  */
2057      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2058    }
2059
2060  /* Process the pieces.  */
2061  for (i = start; i < XVECLEN (src, 0); i++)
2062    {
2063      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2064      enum machine_mode mode = GET_MODE (tmps[i]);
2065      unsigned int bytelen = GET_MODE_SIZE (mode);
2066      rtx dest = dst;
2067
2068      /* Handle trailing fragments that run over the size of the struct.  */
2069      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2070	{
2071	  /* store_bit_field always takes its value from the lsb.
2072	     Move the fragment to the lsb if it's not already there.  */
2073	  if (
2074#ifdef BLOCK_REG_PADDING
2075	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2076	      == (BYTES_BIG_ENDIAN ? upward : downward)
2077#else
2078	      BYTES_BIG_ENDIAN
2079#endif
2080	      )
2081	    {
2082	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2083	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2084			    tmps[i], 0, OPTAB_WIDEN);
2085	    }
2086	  bytelen = ssize - bytepos;
2087	}
2088
2089      if (GET_CODE (dst) == CONCAT)
2090	{
2091	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2092	    dest = XEXP (dst, 0);
2093	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2094	    {
2095	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2096	      dest = XEXP (dst, 1);
2097	    }
2098	  else if (bytepos == 0 && XVECLEN (src, 0))
2099	    {
2100	      dest = assign_stack_temp (GET_MODE (dest),
2101				        GET_MODE_SIZE (GET_MODE (dest)), 0);
2102	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2103			      tmps[i]);
2104	      dst = dest;
2105	      break;
2106	    }
2107	  else
2108	    abort ();
2109	}
2110
2111      /* Optimize the access just a bit.  */
2112      if (GET_CODE (dest) == MEM
2113	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2114	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2115	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2116	  && bytelen == GET_MODE_SIZE (mode))
2117	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2118      else
2119	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2120			 mode, tmps[i], ssize);
2121    }
2122
2123  emit_queue ();
2124
2125  /* Copy from the pseudo into the (probable) hard reg.  */
2126  if (orig_dst != dst)
2127    emit_move_insn (orig_dst, dst);
2128}
2129
2130/* Generate code to copy a BLKmode object of TYPE out of a
2131   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2132   is null, a stack temporary is created.  TGTBLK is returned.
2133
2134   The purpose of this routine is to handle functions that return
2135   BLKmode structures in registers.  Some machines (the PA for example)
2136   want to return all small structures in registers regardless of the
2137   structure's alignment.  */
2138
2139rtx
2140copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2141{
2142  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2143  rtx src = NULL, dst = NULL;
2144  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2145  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2146
2147  if (tgtblk == 0)
2148    {
2149      tgtblk = assign_temp (build_qualified_type (type,
2150						  (TYPE_QUALS (type)
2151						   | TYPE_QUAL_CONST)),
2152			    0, 1, 1);
2153      preserve_temp_slots (tgtblk);
2154    }
2155
2156  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2157     into a new pseudo which is a full word.  */
2158
2159  if (GET_MODE (srcreg) != BLKmode
2160      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2161    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2162
2163  /* If the structure doesn't take up a whole number of words, see whether
2164     SRCREG is padded on the left or on the right.  If it's on the left,
2165     set PADDING_CORRECTION to the number of bits to skip.
2166
2167     In most ABIs, the structure will be returned at the least end of
2168     the register, which translates to right padding on little-endian
2169     targets and left padding on big-endian targets.  The opposite
2170     holds if the structure is returned at the most significant
2171     end of the register.  */
2172  if (bytes % UNITS_PER_WORD != 0
2173      && (targetm.calls.return_in_msb (type)
2174	  ? !BYTES_BIG_ENDIAN
2175	  : BYTES_BIG_ENDIAN))
2176    padding_correction
2177      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2178
2179  /* Copy the structure BITSIZE bites at a time.
2180
2181     We could probably emit more efficient code for machines which do not use
2182     strict alignment, but it doesn't seem worth the effort at the current
2183     time.  */
2184  for (bitpos = 0, xbitpos = padding_correction;
2185       bitpos < bytes * BITS_PER_UNIT;
2186       bitpos += bitsize, xbitpos += bitsize)
2187    {
2188      /* We need a new source operand each time xbitpos is on a
2189	 word boundary and when xbitpos == padding_correction
2190	 (the first time through).  */
2191      if (xbitpos % BITS_PER_WORD == 0
2192	  || xbitpos == padding_correction)
2193	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2194				     GET_MODE (srcreg));
2195
2196      /* We need a new destination operand each time bitpos is on
2197	 a word boundary.  */
2198      if (bitpos % BITS_PER_WORD == 0)
2199	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2200
2201      /* Use xbitpos for the source extraction (right justified) and
2202	 xbitpos for the destination store (left justified).  */
2203      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2204		       extract_bit_field (src, bitsize,
2205					  xbitpos % BITS_PER_WORD, 1,
2206					  NULL_RTX, word_mode, word_mode,
2207					  BITS_PER_WORD),
2208		       BITS_PER_WORD);
2209    }
2210
2211  return tgtblk;
2212}
2213
2214/* Add a USE expression for REG to the (possibly empty) list pointed
2215   to by CALL_FUSAGE.  REG must denote a hard register.  */
2216
2217void
2218use_reg (rtx *call_fusage, rtx reg)
2219{
2220  if (GET_CODE (reg) != REG
2221      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2222    abort ();
2223
2224  *call_fusage
2225    = gen_rtx_EXPR_LIST (VOIDmode,
2226			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2227}
2228
2229/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2230   starting at REGNO.  All of these registers must be hard registers.  */
2231
2232void
2233use_regs (rtx *call_fusage, int regno, int nregs)
2234{
2235  int i;
2236
2237  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2238    abort ();
2239
2240  for (i = 0; i < nregs; i++)
2241    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2242}
2243
2244/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2245   PARALLEL REGS.  This is for calls that pass values in multiple
2246   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2247
2248void
2249use_group_regs (rtx *call_fusage, rtx regs)
2250{
2251  int i;
2252
2253  for (i = 0; i < XVECLEN (regs, 0); i++)
2254    {
2255      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2256
2257      /* A NULL entry means the parameter goes both on the stack and in
2258	 registers.  This can also be a MEM for targets that pass values
2259	 partially on the stack and partially in registers.  */
2260      if (reg != 0 && GET_CODE (reg) == REG)
2261	use_reg (call_fusage, reg);
2262    }
2263}
2264
2265
2266/* Determine whether the LEN bytes generated by CONSTFUN can be
2267   stored to memory using several move instructions.  CONSTFUNDATA is
2268   a pointer which will be passed as argument in every CONSTFUN call.
2269   ALIGN is maximum alignment we can assume.  Return nonzero if a
2270   call to store_by_pieces should succeed.  */
2271
2272int
2273can_store_by_pieces (unsigned HOST_WIDE_INT len,
2274		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2275		     void *constfundata, unsigned int align)
2276{
2277  unsigned HOST_WIDE_INT max_size, l;
2278  HOST_WIDE_INT offset = 0;
2279  enum machine_mode mode, tmode;
2280  enum insn_code icode;
2281  int reverse;
2282  rtx cst;
2283
2284  if (len == 0)
2285    return 1;
2286
2287  if (! STORE_BY_PIECES_P (len, align))
2288    return 0;
2289
2290  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2291      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2292    align = MOVE_MAX * BITS_PER_UNIT;
2293
2294  /* We would first store what we can in the largest integer mode, then go to
2295     successively smaller modes.  */
2296
2297  for (reverse = 0;
2298       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2299       reverse++)
2300    {
2301      l = len;
2302      mode = VOIDmode;
2303      max_size = STORE_MAX_PIECES + 1;
2304      while (max_size > 1)
2305	{
2306	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2307	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2308	    if (GET_MODE_SIZE (tmode) < max_size)
2309	      mode = tmode;
2310
2311	  if (mode == VOIDmode)
2312	    break;
2313
2314	  icode = mov_optab->handlers[(int) mode].insn_code;
2315	  if (icode != CODE_FOR_nothing
2316	      && align >= GET_MODE_ALIGNMENT (mode))
2317	    {
2318	      unsigned int size = GET_MODE_SIZE (mode);
2319
2320	      while (l >= size)
2321		{
2322		  if (reverse)
2323		    offset -= size;
2324
2325		  cst = (*constfun) (constfundata, offset, mode);
2326		  if (!LEGITIMATE_CONSTANT_P (cst))
2327		    return 0;
2328
2329		  if (!reverse)
2330		    offset += size;
2331
2332		  l -= size;
2333		}
2334	    }
2335
2336	  max_size = GET_MODE_SIZE (mode);
2337	}
2338
2339      /* The code above should have handled everything.  */
2340      if (l != 0)
2341	abort ();
2342    }
2343
2344  return 1;
2345}
2346
2347/* Generate several move instructions to store LEN bytes generated by
2348   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2349   pointer which will be passed as argument in every CONSTFUN call.
2350   ALIGN is maximum alignment we can assume.
2351   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2352   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2353   stpcpy.  */
2354
2355rtx
2356store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2357		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2358		 void *constfundata, unsigned int align, int endp)
2359{
2360  struct store_by_pieces data;
2361
2362  if (len == 0)
2363    {
2364      if (endp == 2)
2365	abort ();
2366      return to;
2367    }
2368
2369  if (! STORE_BY_PIECES_P (len, align))
2370    abort ();
2371  to = protect_from_queue (to, 1);
2372  data.constfun = constfun;
2373  data.constfundata = constfundata;
2374  data.len = len;
2375  data.to = to;
2376  store_by_pieces_1 (&data, align);
2377  if (endp)
2378    {
2379      rtx to1;
2380
2381      if (data.reverse)
2382	abort ();
2383      if (data.autinc_to)
2384	{
2385	  if (endp == 2)
2386	    {
2387	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2388		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2389	      else
2390		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2391								-1));
2392	    }
2393	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2394					   data.offset);
2395	}
2396      else
2397	{
2398	  if (endp == 2)
2399	    --data.offset;
2400	  to1 = adjust_address (data.to, QImode, data.offset);
2401	}
2402      return to1;
2403    }
2404  else
2405    return data.to;
2406}
2407
2408/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2409   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2410   before calling. ALIGN is maximum alignment we can assume.  */
2411
2412static void
2413clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2414{
2415  struct store_by_pieces data;
2416
2417  if (len == 0)
2418    return;
2419
2420  data.constfun = clear_by_pieces_1;
2421  data.constfundata = NULL;
2422  data.len = len;
2423  data.to = to;
2424  store_by_pieces_1 (&data, align);
2425}
2426
2427/* Callback routine for clear_by_pieces.
2428   Return const0_rtx unconditionally.  */
2429
2430static rtx
2431clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2432		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2433		   enum machine_mode mode ATTRIBUTE_UNUSED)
2434{
2435  return const0_rtx;
2436}
2437
2438/* Subroutine of clear_by_pieces and store_by_pieces.
2439   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2440   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2441   before calling.  ALIGN is maximum alignment we can assume.  */
2442
2443static void
2444store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2445		   unsigned int align ATTRIBUTE_UNUSED)
2446{
2447  rtx to_addr = XEXP (data->to, 0);
2448  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2449  enum machine_mode mode = VOIDmode, tmode;
2450  enum insn_code icode;
2451
2452  data->offset = 0;
2453  data->to_addr = to_addr;
2454  data->autinc_to
2455    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2456       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2457
2458  data->explicit_inc_to = 0;
2459  data->reverse
2460    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2461  if (data->reverse)
2462    data->offset = data->len;
2463
2464  /* If storing requires more than two move insns,
2465     copy addresses to registers (to make displacements shorter)
2466     and use post-increment if available.  */
2467  if (!data->autinc_to
2468      && move_by_pieces_ninsns (data->len, align) > 2)
2469    {
2470      /* Determine the main mode we'll be using.  */
2471      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473	if (GET_MODE_SIZE (tmode) < max_size)
2474	  mode = tmode;
2475
2476      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2477	{
2478	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2479	  data->autinc_to = 1;
2480	  data->explicit_inc_to = -1;
2481	}
2482
2483      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2484	  && ! data->autinc_to)
2485	{
2486	  data->to_addr = copy_addr_to_reg (to_addr);
2487	  data->autinc_to = 1;
2488	  data->explicit_inc_to = 1;
2489	}
2490
2491      if ( !data->autinc_to && CONSTANT_P (to_addr))
2492	data->to_addr = copy_addr_to_reg (to_addr);
2493    }
2494
2495  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2496      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2497    align = MOVE_MAX * BITS_PER_UNIT;
2498
2499  /* First store what we can in the largest integer mode, then go to
2500     successively smaller modes.  */
2501
2502  while (max_size > 1)
2503    {
2504      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2505	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2506	if (GET_MODE_SIZE (tmode) < max_size)
2507	  mode = tmode;
2508
2509      if (mode == VOIDmode)
2510	break;
2511
2512      icode = mov_optab->handlers[(int) mode].insn_code;
2513      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2514	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2515
2516      max_size = GET_MODE_SIZE (mode);
2517    }
2518
2519  /* The code above should have handled everything.  */
2520  if (data->len != 0)
2521    abort ();
2522}
2523
2524/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2525   with move instructions for mode MODE.  GENFUN is the gen_... function
2526   to make a move insn for that mode.  DATA has all the other info.  */
2527
2528static void
2529store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2530		   struct store_by_pieces *data)
2531{
2532  unsigned int size = GET_MODE_SIZE (mode);
2533  rtx to1, cst;
2534
2535  while (data->len >= size)
2536    {
2537      if (data->reverse)
2538	data->offset -= size;
2539
2540      if (data->autinc_to)
2541	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2542					 data->offset);
2543      else
2544	to1 = adjust_address (data->to, mode, data->offset);
2545
2546      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2547	emit_insn (gen_add2_insn (data->to_addr,
2548				  GEN_INT (-(HOST_WIDE_INT) size)));
2549
2550      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2551      emit_insn ((*genfun) (to1, cst));
2552
2553      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2554	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2555
2556      if (! data->reverse)
2557	data->offset += size;
2558
2559      data->len -= size;
2560    }
2561}
2562
2563/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2564   its length in bytes.  */
2565
2566rtx
2567clear_storage (rtx object, rtx size)
2568{
2569  rtx retval = 0;
2570  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2571			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2572
2573  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2574     just move a zero.  Otherwise, do this a piece at a time.  */
2575  if (GET_MODE (object) != BLKmode
2576      && GET_CODE (size) == CONST_INT
2577      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2578    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2579  else
2580    {
2581      object = protect_from_queue (object, 1);
2582      size = protect_from_queue (size, 0);
2583
2584      if (size == const0_rtx)
2585	;
2586      else if (GET_CODE (size) == CONST_INT
2587	  && CLEAR_BY_PIECES_P (INTVAL (size), align))
2588	clear_by_pieces (object, INTVAL (size), align);
2589      else if (clear_storage_via_clrstr (object, size, align))
2590	;
2591      else
2592	retval = clear_storage_via_libcall (object, size);
2593    }
2594
2595  return retval;
2596}
2597
2598/* A subroutine of clear_storage.  Expand a clrstr pattern;
2599   return true if successful.  */
2600
2601static bool
2602clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2603{
2604  /* Try the most limited insn first, because there's no point
2605     including more than one in the machine description unless
2606     the more limited one has some advantage.  */
2607
2608  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2609  enum machine_mode mode;
2610
2611  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2612       mode = GET_MODE_WIDER_MODE (mode))
2613    {
2614      enum insn_code code = clrstr_optab[(int) mode];
2615      insn_operand_predicate_fn pred;
2616
2617      if (code != CODE_FOR_nothing
2618	  /* We don't need MODE to be narrower than
2619	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2620	     the mode mask, as it is returned by the macro, it will
2621	     definitely be less than the actual mode mask.  */
2622	  && ((GET_CODE (size) == CONST_INT
2623	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2624		   <= (GET_MODE_MASK (mode) >> 1)))
2625	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2626	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2627	      || (*pred) (object, BLKmode))
2628	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2629	      || (*pred) (opalign, VOIDmode)))
2630	{
2631	  rtx op1;
2632	  rtx last = get_last_insn ();
2633	  rtx pat;
2634
2635	  op1 = convert_to_mode (mode, size, 1);
2636	  pred = insn_data[(int) code].operand[1].predicate;
2637	  if (pred != 0 && ! (*pred) (op1, mode))
2638	    op1 = copy_to_mode_reg (mode, op1);
2639
2640	  pat = GEN_FCN ((int) code) (object, op1, opalign);
2641	  if (pat)
2642	    {
2643	      emit_insn (pat);
2644	      return true;
2645	    }
2646	  else
2647	    delete_insns_since (last);
2648	}
2649    }
2650
2651  return false;
2652}
2653
2654/* A subroutine of clear_storage.  Expand a call to memset or bzero.
2655   Return the return value of memset, 0 otherwise.  */
2656
2657static rtx
2658clear_storage_via_libcall (rtx object, rtx size)
2659{
2660  tree call_expr, arg_list, fn, object_tree, size_tree;
2661  enum machine_mode size_mode;
2662  rtx retval;
2663
2664  /* OBJECT or SIZE may have been passed through protect_from_queue.
2665
2666     It is unsafe to save the value generated by protect_from_queue
2667     and reuse it later.  Consider what happens if emit_queue is
2668     called before the return value from protect_from_queue is used.
2669
2670     Expansion of the CALL_EXPR below will call emit_queue before
2671     we are finished emitting RTL for argument setup.  So if we are
2672     not careful we could get the wrong value for an argument.
2673
2674     To avoid this problem we go ahead and emit code to copy OBJECT
2675     and SIZE into new pseudos.  We can then place those new pseudos
2676     into an RTL_EXPR and use them later, even after a call to
2677     emit_queue.
2678
2679     Note this is not strictly needed for library calls since they
2680     do not call emit_queue before loading their arguments.  However,
2681     we may need to have library calls call emit_queue in the future
2682     since failing to do so could cause problems for targets which
2683     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2684
2685  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2686
2687  if (TARGET_MEM_FUNCTIONS)
2688    size_mode = TYPE_MODE (sizetype);
2689  else
2690    size_mode = TYPE_MODE (unsigned_type_node);
2691  size = convert_to_mode (size_mode, size, 1);
2692  size = copy_to_mode_reg (size_mode, size);
2693
2694  /* It is incorrect to use the libcall calling conventions to call
2695     memset in this context.  This could be a user call to memset and
2696     the user may wish to examine the return value from memset.  For
2697     targets where libcalls and normal calls have different conventions
2698     for returning pointers, we could end up generating incorrect code.
2699
2700     For convenience, we generate the call to bzero this way as well.  */
2701
2702  object_tree = make_tree (ptr_type_node, object);
2703  if (TARGET_MEM_FUNCTIONS)
2704    size_tree = make_tree (sizetype, size);
2705  else
2706    size_tree = make_tree (unsigned_type_node, size);
2707
2708  fn = clear_storage_libcall_fn (true);
2709  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2710  if (TARGET_MEM_FUNCTIONS)
2711    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2712  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2713
2714  /* Now we have to build up the CALL_EXPR itself.  */
2715  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2716  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2717		     call_expr, arg_list, NULL_TREE);
2718
2719  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2720
2721  /* If we are initializing a readonly value, show the above call
2722     clobbered it.  Otherwise, a load from it may erroneously be
2723     hoisted from a loop.  */
2724  if (RTX_UNCHANGING_P (object))
2725    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2726
2727  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2728}
2729
2730/* A subroutine of clear_storage_via_libcall.  Create the tree node
2731   for the function we use for block clears.  The first time FOR_CALL
2732   is true, we call assemble_external.  */
2733
2734static GTY(()) tree block_clear_fn;
2735
2736void
2737init_block_clear_fn (const char *asmspec)
2738{
2739  if (!block_clear_fn)
2740    {
2741      tree fn, args;
2742
2743      if (TARGET_MEM_FUNCTIONS)
2744	{
2745	  fn = get_identifier ("memset");
2746	  args = build_function_type_list (ptr_type_node, ptr_type_node,
2747					   integer_type_node, sizetype,
2748					   NULL_TREE);
2749	}
2750      else
2751	{
2752	  fn = get_identifier ("bzero");
2753	  args = build_function_type_list (void_type_node, ptr_type_node,
2754					   unsigned_type_node, NULL_TREE);
2755	}
2756
2757      fn = build_decl (FUNCTION_DECL, fn, args);
2758      DECL_EXTERNAL (fn) = 1;
2759      TREE_PUBLIC (fn) = 1;
2760      DECL_ARTIFICIAL (fn) = 1;
2761      TREE_NOTHROW (fn) = 1;
2762
2763      block_clear_fn = fn;
2764    }
2765
2766  if (asmspec)
2767    {
2768      SET_DECL_RTL (block_clear_fn, NULL_RTX);
2769      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2770    }
2771}
2772
2773static tree
2774clear_storage_libcall_fn (int for_call)
2775{
2776  static bool emitted_extern;
2777
2778  if (!block_clear_fn)
2779    init_block_clear_fn (NULL);
2780
2781  if (for_call && !emitted_extern)
2782    {
2783      emitted_extern = true;
2784      make_decl_rtl (block_clear_fn, NULL);
2785      assemble_external (block_clear_fn);
2786    }
2787
2788  return block_clear_fn;
2789}
2790
2791/* Generate code to copy Y into X.
2792   Both Y and X must have the same mode, except that
2793   Y can be a constant with VOIDmode.
2794   This mode cannot be BLKmode; use emit_block_move for that.
2795
2796   Return the last instruction emitted.  */
2797
2798rtx
2799emit_move_insn (rtx x, rtx y)
2800{
2801  enum machine_mode mode = GET_MODE (x);
2802  rtx y_cst = NULL_RTX;
2803  rtx last_insn, set;
2804
2805  x = protect_from_queue (x, 1);
2806  y = protect_from_queue (y, 0);
2807
2808  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2809    abort ();
2810
2811  /* Never force constant_p_rtx to memory.  */
2812  if (GET_CODE (y) == CONSTANT_P_RTX)
2813    ;
2814  else if (CONSTANT_P (y))
2815    {
2816      if (optimize
2817	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2818	  && (last_insn = compress_float_constant (x, y)))
2819	return last_insn;
2820
2821      y_cst = y;
2822
2823      if (!LEGITIMATE_CONSTANT_P (y))
2824	{
2825	  y = force_const_mem (mode, y);
2826
2827	  /* If the target's cannot_force_const_mem prevented the spill,
2828	     assume that the target's move expanders will also take care
2829	     of the non-legitimate constant.  */
2830	  if (!y)
2831	    y = y_cst;
2832	}
2833    }
2834
2835  /* If X or Y are memory references, verify that their addresses are valid
2836     for the machine.  */
2837  if (GET_CODE (x) == MEM
2838      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2839	   && ! push_operand (x, GET_MODE (x)))
2840	  || (flag_force_addr
2841	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2842    x = validize_mem (x);
2843
2844  if (GET_CODE (y) == MEM
2845      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2846	  || (flag_force_addr
2847	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2848    y = validize_mem (y);
2849
2850  if (mode == BLKmode)
2851    abort ();
2852
2853  last_insn = emit_move_insn_1 (x, y);
2854
2855  if (y_cst && GET_CODE (x) == REG
2856      && (set = single_set (last_insn)) != NULL_RTX
2857      && SET_DEST (set) == x
2858      && ! rtx_equal_p (y_cst, SET_SRC (set)))
2859    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2860
2861  return last_insn;
2862}
2863
2864/* Low level part of emit_move_insn.
2865   Called just like emit_move_insn, but assumes X and Y
2866   are basically valid.  */
2867
2868rtx
2869emit_move_insn_1 (rtx x, rtx y)
2870{
2871  enum machine_mode mode = GET_MODE (x);
2872  enum machine_mode submode;
2873  enum mode_class class = GET_MODE_CLASS (mode);
2874
2875  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2876    abort ();
2877
2878  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2879    return
2880      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2881
2882  /* Expand complex moves by moving real part and imag part, if possible.  */
2883  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2884	   && BLKmode != (submode = GET_MODE_INNER (mode))
2885	   && (mov_optab->handlers[(int) submode].insn_code
2886	       != CODE_FOR_nothing))
2887    {
2888      /* Don't split destination if it is a stack push.  */
2889      int stack = push_operand (x, GET_MODE (x));
2890
2891#ifdef PUSH_ROUNDING
2892      /* In case we output to the stack, but the size is smaller than the
2893	 machine can push exactly, we need to use move instructions.  */
2894      if (stack
2895	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2896	      != GET_MODE_SIZE (submode)))
2897	{
2898	  rtx temp;
2899	  HOST_WIDE_INT offset1, offset2;
2900
2901	  /* Do not use anti_adjust_stack, since we don't want to update
2902	     stack_pointer_delta.  */
2903	  temp = expand_binop (Pmode,
2904#ifdef STACK_GROWS_DOWNWARD
2905			       sub_optab,
2906#else
2907			       add_optab,
2908#endif
2909			       stack_pointer_rtx,
2910			       GEN_INT
2911				 (PUSH_ROUNDING
2912				  (GET_MODE_SIZE (GET_MODE (x)))),
2913			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2914
2915	  if (temp != stack_pointer_rtx)
2916	    emit_move_insn (stack_pointer_rtx, temp);
2917
2918#ifdef STACK_GROWS_DOWNWARD
2919	  offset1 = 0;
2920	  offset2 = GET_MODE_SIZE (submode);
2921#else
2922	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2923	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2924		     + GET_MODE_SIZE (submode));
2925#endif
2926
2927	  emit_move_insn (change_address (x, submode,
2928					  gen_rtx_PLUS (Pmode,
2929						        stack_pointer_rtx,
2930							GEN_INT (offset1))),
2931			  gen_realpart (submode, y));
2932	  emit_move_insn (change_address (x, submode,
2933					  gen_rtx_PLUS (Pmode,
2934						        stack_pointer_rtx,
2935							GEN_INT (offset2))),
2936			  gen_imagpart (submode, y));
2937	}
2938      else
2939#endif
2940      /* If this is a stack, push the highpart first, so it
2941	 will be in the argument order.
2942
2943	 In that case, change_address is used only to convert
2944	 the mode, not to change the address.  */
2945      if (stack)
2946	{
2947	  /* Note that the real part always precedes the imag part in memory
2948	     regardless of machine's endianness.  */
2949#ifdef STACK_GROWS_DOWNWARD
2950	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2951			  gen_imagpart (submode, y));
2952	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2953			  gen_realpart (submode, y));
2954#else
2955	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2956			  gen_realpart (submode, y));
2957	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2958			  gen_imagpart (submode, y));
2959#endif
2960	}
2961      else
2962	{
2963	  rtx realpart_x, realpart_y;
2964	  rtx imagpart_x, imagpart_y;
2965
2966	  /* If this is a complex value with each part being smaller than a
2967	     word, the usual calling sequence will likely pack the pieces into
2968	     a single register.  Unfortunately, SUBREG of hard registers only
2969	     deals in terms of words, so we have a problem converting input
2970	     arguments to the CONCAT of two registers that is used elsewhere
2971	     for complex values.  If this is before reload, we can copy it into
2972	     memory and reload.  FIXME, we should see about using extract and
2973	     insert on integer registers, but complex short and complex char
2974	     variables should be rarely used.  */
2975	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2976	      && (reload_in_progress | reload_completed) == 0)
2977	    {
2978	      int packed_dest_p
2979		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2980	      int packed_src_p
2981		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2982
2983	      if (packed_dest_p || packed_src_p)
2984		{
2985		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2986					       ? MODE_FLOAT : MODE_INT);
2987
2988		  enum machine_mode reg_mode
2989		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2990
2991		  if (reg_mode != BLKmode)
2992		    {
2993		      rtx mem = assign_stack_temp (reg_mode,
2994						   GET_MODE_SIZE (mode), 0);
2995		      rtx cmem = adjust_address (mem, mode, 0);
2996
2997		      cfun->cannot_inline
2998			= N_("function using short complex types cannot be inline");
2999
3000		      if (packed_dest_p)
3001			{
3002			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3003
3004			  emit_move_insn_1 (cmem, y);
3005			  return emit_move_insn_1 (sreg, mem);
3006			}
3007		      else
3008			{
3009			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3010
3011			  emit_move_insn_1 (mem, sreg);
3012			  return emit_move_insn_1 (x, cmem);
3013			}
3014		    }
3015		}
3016	    }
3017
3018	  realpart_x = gen_realpart (submode, x);
3019	  realpart_y = gen_realpart (submode, y);
3020	  imagpart_x = gen_imagpart (submode, x);
3021	  imagpart_y = gen_imagpart (submode, y);
3022
3023	  /* Show the output dies here.  This is necessary for SUBREGs
3024	     of pseudos since we cannot track their lifetimes correctly;
3025	     hard regs shouldn't appear here except as return values.
3026	     We never want to emit such a clobber after reload.  */
3027	  if (x != y
3028	      && ! (reload_in_progress || reload_completed)
3029	      && (GET_CODE (realpart_x) == SUBREG
3030		  || GET_CODE (imagpart_x) == SUBREG))
3031	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3032
3033	  emit_move_insn (realpart_x, realpart_y);
3034	  emit_move_insn (imagpart_x, imagpart_y);
3035	}
3036
3037      return get_last_insn ();
3038    }
3039
3040  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
3041     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
3042     find the MODE_INT mode of the same width.  */
3043  else if (GET_MODE_CLASS (mode) == MODE_CC
3044	   && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3045    {
3046      enum insn_code insn_code;
3047      enum machine_mode tmode = VOIDmode;
3048      rtx x1 = x, y1 = y;
3049
3050      if (mode != CCmode
3051	  && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3052	tmode = CCmode;
3053      else
3054	for (tmode = QImode; tmode != VOIDmode;
3055	     tmode = GET_MODE_WIDER_MODE (tmode))
3056	  if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3057	    break;
3058
3059      if (tmode == VOIDmode)
3060	abort ();
3061
3062      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
3063	 may call change_address which is not appropriate if we were
3064	 called when a reload was in progress.  We don't have to worry
3065	 about changing the address since the size in bytes is supposed to
3066	 be the same.  Copy the MEM to change the mode and move any
3067	 substitutions from the old MEM to the new one.  */
3068
3069      if (reload_in_progress)
3070	{
3071	  x = gen_lowpart_common (tmode, x1);
3072	  if (x == 0 && GET_CODE (x1) == MEM)
3073	    {
3074	      x = adjust_address_nv (x1, tmode, 0);
3075	      copy_replacements (x1, x);
3076	    }
3077
3078	  y = gen_lowpart_common (tmode, y1);
3079	  if (y == 0 && GET_CODE (y1) == MEM)
3080	    {
3081	      y = adjust_address_nv (y1, tmode, 0);
3082	      copy_replacements (y1, y);
3083	    }
3084	}
3085      else
3086	{
3087	  x = gen_lowpart (tmode, x);
3088	  y = gen_lowpart (tmode, y);
3089	}
3090
3091      insn_code = mov_optab->handlers[(int) tmode].insn_code;
3092      return emit_insn (GEN_FCN (insn_code) (x, y));
3093    }
3094
3095  /* Try using a move pattern for the corresponding integer mode.  This is
3096     only safe when simplify_subreg can convert MODE constants into integer
3097     constants.  At present, it can only do this reliably if the value
3098     fits within a HOST_WIDE_INT.  */
3099  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3100	   && (submode = int_mode_for_mode (mode)) != BLKmode
3101	   && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3102    return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3103		      (simplify_gen_subreg (submode, x, mode, 0),
3104		       simplify_gen_subreg (submode, y, mode, 0)));
3105
3106  /* This will handle any multi-word or full-word mode that lacks a move_insn
3107     pattern.  However, you will get better code if you define such patterns,
3108     even if they must turn into multiple assembler instructions.  */
3109  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3110    {
3111      rtx last_insn = 0;
3112      rtx seq, inner;
3113      int need_clobber;
3114      int i;
3115
3116#ifdef PUSH_ROUNDING
3117
3118      /* If X is a push on the stack, do the push now and replace
3119	 X with a reference to the stack pointer.  */
3120      if (push_operand (x, GET_MODE (x)))
3121	{
3122	  rtx temp;
3123	  enum rtx_code code;
3124
3125	  /* Do not use anti_adjust_stack, since we don't want to update
3126	     stack_pointer_delta.  */
3127	  temp = expand_binop (Pmode,
3128#ifdef STACK_GROWS_DOWNWARD
3129			       sub_optab,
3130#else
3131			       add_optab,
3132#endif
3133			       stack_pointer_rtx,
3134			       GEN_INT
3135				 (PUSH_ROUNDING
3136				  (GET_MODE_SIZE (GET_MODE (x)))),
3137			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3138
3139	  if (temp != stack_pointer_rtx)
3140	    emit_move_insn (stack_pointer_rtx, temp);
3141
3142	  code = GET_CODE (XEXP (x, 0));
3143
3144	  /* Just hope that small offsets off SP are OK.  */
3145	  if (code == POST_INC)
3146	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3147				GEN_INT (-((HOST_WIDE_INT)
3148					   GET_MODE_SIZE (GET_MODE (x)))));
3149	  else if (code == POST_DEC)
3150	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3151				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3152	  else
3153	    temp = stack_pointer_rtx;
3154
3155	  x = change_address (x, VOIDmode, temp);
3156	}
3157#endif
3158
3159      /* If we are in reload, see if either operand is a MEM whose address
3160	 is scheduled for replacement.  */
3161      if (reload_in_progress && GET_CODE (x) == MEM
3162	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3163	x = replace_equiv_address_nv (x, inner);
3164      if (reload_in_progress && GET_CODE (y) == MEM
3165	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3166	y = replace_equiv_address_nv (y, inner);
3167
3168      start_sequence ();
3169
3170      need_clobber = 0;
3171      for (i = 0;
3172	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3173	   i++)
3174	{
3175	  rtx xpart = operand_subword (x, i, 1, mode);
3176	  rtx ypart = operand_subword (y, i, 1, mode);
3177
3178	  /* If we can't get a part of Y, put Y into memory if it is a
3179	     constant.  Otherwise, force it into a register.  If we still
3180	     can't get a part of Y, abort.  */
3181	  if (ypart == 0 && CONSTANT_P (y))
3182	    {
3183	      y = force_const_mem (mode, y);
3184	      ypart = operand_subword (y, i, 1, mode);
3185	    }
3186	  else if (ypart == 0)
3187	    ypart = operand_subword_force (y, i, mode);
3188
3189	  if (xpart == 0 || ypart == 0)
3190	    abort ();
3191
3192	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3193
3194	  last_insn = emit_move_insn (xpart, ypart);
3195	}
3196
3197      seq = get_insns ();
3198      end_sequence ();
3199
3200      /* Show the output dies here.  This is necessary for SUBREGs
3201	 of pseudos since we cannot track their lifetimes correctly;
3202	 hard regs shouldn't appear here except as return values.
3203	 We never want to emit such a clobber after reload.  */
3204      if (x != y
3205	  && ! (reload_in_progress || reload_completed)
3206	  && need_clobber != 0)
3207	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3208
3209      emit_insn (seq);
3210
3211      return last_insn;
3212    }
3213  else
3214    abort ();
3215}
3216
3217/* If Y is representable exactly in a narrower mode, and the target can
3218   perform the extension directly from constant or memory, then emit the
3219   move as an extension.  */
3220
3221static rtx
3222compress_float_constant (rtx x, rtx y)
3223{
3224  enum machine_mode dstmode = GET_MODE (x);
3225  enum machine_mode orig_srcmode = GET_MODE (y);
3226  enum machine_mode srcmode;
3227  REAL_VALUE_TYPE r;
3228
3229  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3230
3231  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3232       srcmode != orig_srcmode;
3233       srcmode = GET_MODE_WIDER_MODE (srcmode))
3234    {
3235      enum insn_code ic;
3236      rtx trunc_y, last_insn;
3237
3238      /* Skip if the target can't extend this way.  */
3239      ic = can_extend_p (dstmode, srcmode, 0);
3240      if (ic == CODE_FOR_nothing)
3241	continue;
3242
3243      /* Skip if the narrowed value isn't exact.  */
3244      if (! exact_real_truncate (srcmode, &r))
3245	continue;
3246
3247      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3248
3249      if (LEGITIMATE_CONSTANT_P (trunc_y))
3250	{
3251	  /* Skip if the target needs extra instructions to perform
3252	     the extension.  */
3253	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3254	    continue;
3255	}
3256      else if (float_extend_from_mem[dstmode][srcmode])
3257	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3258      else
3259	continue;
3260
3261      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3262      last_insn = get_last_insn ();
3263
3264      if (GET_CODE (x) == REG)
3265	set_unique_reg_note (last_insn, REG_EQUAL, y);
3266
3267      return last_insn;
3268    }
3269
3270  return NULL_RTX;
3271}
3272
3273/* Pushing data onto the stack.  */
3274
3275/* Push a block of length SIZE (perhaps variable)
3276   and return an rtx to address the beginning of the block.
3277   Note that it is not possible for the value returned to be a QUEUED.
3278   The value may be virtual_outgoing_args_rtx.
3279
3280   EXTRA is the number of bytes of padding to push in addition to SIZE.
3281   BELOW nonzero means this padding comes at low addresses;
3282   otherwise, the padding comes at high addresses.  */
3283
3284rtx
3285push_block (rtx size, int extra, int below)
3286{
3287  rtx temp;
3288
3289  size = convert_modes (Pmode, ptr_mode, size, 1);
3290  if (CONSTANT_P (size))
3291    anti_adjust_stack (plus_constant (size, extra));
3292  else if (GET_CODE (size) == REG && extra == 0)
3293    anti_adjust_stack (size);
3294  else
3295    {
3296      temp = copy_to_mode_reg (Pmode, size);
3297      if (extra != 0)
3298	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3299			     temp, 0, OPTAB_LIB_WIDEN);
3300      anti_adjust_stack (temp);
3301    }
3302
3303#ifndef STACK_GROWS_DOWNWARD
3304  if (0)
3305#else
3306  if (1)
3307#endif
3308    {
3309      temp = virtual_outgoing_args_rtx;
3310      if (extra != 0 && below)
3311	temp = plus_constant (temp, extra);
3312    }
3313  else
3314    {
3315      if (GET_CODE (size) == CONST_INT)
3316	temp = plus_constant (virtual_outgoing_args_rtx,
3317			      -INTVAL (size) - (below ? 0 : extra));
3318      else if (extra != 0 && !below)
3319	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3320			     negate_rtx (Pmode, plus_constant (size, extra)));
3321      else
3322	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3323			     negate_rtx (Pmode, size));
3324    }
3325
3326  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3327}
3328
3329#ifdef PUSH_ROUNDING
3330
3331/* Emit single push insn.  */
3332
3333static void
3334emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3335{
3336  rtx dest_addr;
3337  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3338  rtx dest;
3339  enum insn_code icode;
3340  insn_operand_predicate_fn pred;
3341
3342  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3343  /* If there is push pattern, use it.  Otherwise try old way of throwing
3344     MEM representing push operation to move expander.  */
3345  icode = push_optab->handlers[(int) mode].insn_code;
3346  if (icode != CODE_FOR_nothing)
3347    {
3348      if (((pred = insn_data[(int) icode].operand[0].predicate)
3349	   && !((*pred) (x, mode))))
3350	x = force_reg (mode, x);
3351      emit_insn (GEN_FCN (icode) (x));
3352      return;
3353    }
3354  if (GET_MODE_SIZE (mode) == rounded_size)
3355    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3356  /* If we are to pad downward, adjust the stack pointer first and
3357     then store X into the stack location using an offset.  This is
3358     because emit_move_insn does not know how to pad; it does not have
3359     access to type.  */
3360  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3361    {
3362      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3363      HOST_WIDE_INT offset;
3364
3365      emit_move_insn (stack_pointer_rtx,
3366		      expand_binop (Pmode,
3367#ifdef STACK_GROWS_DOWNWARD
3368				    sub_optab,
3369#else
3370				    add_optab,
3371#endif
3372				    stack_pointer_rtx,
3373				    GEN_INT (rounded_size),
3374				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3375
3376      offset = (HOST_WIDE_INT) padding_size;
3377#ifdef STACK_GROWS_DOWNWARD
3378      if (STACK_PUSH_CODE == POST_DEC)
3379	/* We have already decremented the stack pointer, so get the
3380	   previous value.  */
3381	offset += (HOST_WIDE_INT) rounded_size;
3382#else
3383      if (STACK_PUSH_CODE == POST_INC)
3384	/* We have already incremented the stack pointer, so get the
3385	   previous value.  */
3386	offset -= (HOST_WIDE_INT) rounded_size;
3387#endif
3388      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3389    }
3390  else
3391    {
3392#ifdef STACK_GROWS_DOWNWARD
3393      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3394      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3395				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3396#else
3397      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3398      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3399				GEN_INT (rounded_size));
3400#endif
3401      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3402    }
3403
3404  dest = gen_rtx_MEM (mode, dest_addr);
3405
3406  if (type != 0)
3407    {
3408      set_mem_attributes (dest, type, 1);
3409
3410      if (flag_optimize_sibling_calls)
3411	/* Function incoming arguments may overlap with sibling call
3412	   outgoing arguments and we cannot allow reordering of reads
3413	   from function arguments with stores to outgoing arguments
3414	   of sibling calls.  */
3415	set_mem_alias_set (dest, 0);
3416    }
3417  emit_move_insn (dest, x);
3418}
3419#endif
3420
3421/* Generate code to push X onto the stack, assuming it has mode MODE and
3422   type TYPE.
3423   MODE is redundant except when X is a CONST_INT (since they don't
3424   carry mode info).
3425   SIZE is an rtx for the size of data to be copied (in bytes),
3426   needed only if X is BLKmode.
3427
3428   ALIGN (in bits) is maximum alignment we can assume.
3429
3430   If PARTIAL and REG are both nonzero, then copy that many of the first
3431   words of X into registers starting with REG, and push the rest of X.
3432   The amount of space pushed is decreased by PARTIAL words,
3433   rounded *down* to a multiple of PARM_BOUNDARY.
3434   REG must be a hard register in this case.
3435   If REG is zero but PARTIAL is not, take any all others actions for an
3436   argument partially in registers, but do not actually load any
3437   registers.
3438
3439   EXTRA is the amount in bytes of extra space to leave next to this arg.
3440   This is ignored if an argument block has already been allocated.
3441
3442   On a machine that lacks real push insns, ARGS_ADDR is the address of
3443   the bottom of the argument block for this call.  We use indexing off there
3444   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
3445   argument block has not been preallocated.
3446
3447   ARGS_SO_FAR is the size of args previously pushed for this call.
3448
3449   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3450   for arguments passed in registers.  If nonzero, it will be the number
3451   of bytes required.  */
3452
3453void
3454emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3455		unsigned int align, int partial, rtx reg, int extra,
3456		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3457		rtx alignment_pad)
3458{
3459  rtx xinner;
3460  enum direction stack_direction
3461#ifdef STACK_GROWS_DOWNWARD
3462    = downward;
3463#else
3464    = upward;
3465#endif
3466
3467  /* Decide where to pad the argument: `downward' for below,
3468     `upward' for above, or `none' for don't pad it.
3469     Default is below for small data on big-endian machines; else above.  */
3470  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3471
3472  /* Invert direction if stack is post-decrement.
3473     FIXME: why?  */
3474  if (STACK_PUSH_CODE == POST_DEC)
3475    if (where_pad != none)
3476      where_pad = (where_pad == downward ? upward : downward);
3477
3478  xinner = x = protect_from_queue (x, 0);
3479
3480  if (mode == BLKmode)
3481    {
3482      /* Copy a block into the stack, entirely or partially.  */
3483
3484      rtx temp;
3485      int used = partial * UNITS_PER_WORD;
3486      int offset;
3487      int skip;
3488
3489      if (reg && GET_CODE (reg) == PARALLEL)
3490	{
3491	  /* Use the size of the elt to compute offset.  */
3492	  rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3493	  used = partial * GET_MODE_SIZE (GET_MODE (elt));
3494	  offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3495	}
3496      else
3497	offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3498
3499      if (size == 0)
3500	abort ();
3501
3502      used -= offset;
3503
3504      /* USED is now the # of bytes we need not copy to the stack
3505	 because registers will take care of them.  */
3506
3507      if (partial != 0)
3508	xinner = adjust_address (xinner, BLKmode, used);
3509
3510      /* If the partial register-part of the arg counts in its stack size,
3511	 skip the part of stack space corresponding to the registers.
3512	 Otherwise, start copying to the beginning of the stack space,
3513	 by setting SKIP to 0.  */
3514      skip = (reg_parm_stack_space == 0) ? 0 : used;
3515
3516#ifdef PUSH_ROUNDING
3517      /* Do it with several push insns if that doesn't take lots of insns
3518	 and if there is no difficulty with push insns that skip bytes
3519	 on the stack for alignment purposes.  */
3520      if (args_addr == 0
3521	  && PUSH_ARGS
3522	  && GET_CODE (size) == CONST_INT
3523	  && skip == 0
3524	  && MEM_ALIGN (xinner) >= align
3525	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3526	  /* Here we avoid the case of a structure whose weak alignment
3527	     forces many pushes of a small amount of data,
3528	     and such small pushes do rounding that causes trouble.  */
3529	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3530	      || align >= BIGGEST_ALIGNMENT
3531	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3532		  == (align / BITS_PER_UNIT)))
3533	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3534	{
3535	  /* Push padding now if padding above and stack grows down,
3536	     or if padding below and stack grows up.
3537	     But if space already allocated, this has already been done.  */
3538	  if (extra && args_addr == 0
3539	      && where_pad != none && where_pad != stack_direction)
3540	    anti_adjust_stack (GEN_INT (extra));
3541
3542	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3543	}
3544      else
3545#endif /* PUSH_ROUNDING  */
3546	{
3547	  rtx target;
3548
3549	  /* Otherwise make space on the stack and copy the data
3550	     to the address of that space.  */
3551
3552	  /* Deduct words put into registers from the size we must copy.  */
3553	  if (partial != 0)
3554	    {
3555	      if (GET_CODE (size) == CONST_INT)
3556		size = GEN_INT (INTVAL (size) - used);
3557	      else
3558		size = expand_binop (GET_MODE (size), sub_optab, size,
3559				     GEN_INT (used), NULL_RTX, 0,
3560				     OPTAB_LIB_WIDEN);
3561	    }
3562
3563	  /* Get the address of the stack space.
3564	     In this case, we do not deal with EXTRA separately.
3565	     A single stack adjust will do.  */
3566	  if (! args_addr)
3567	    {
3568	      temp = push_block (size, extra, where_pad == downward);
3569	      extra = 0;
3570	    }
3571	  else if (GET_CODE (args_so_far) == CONST_INT)
3572	    temp = memory_address (BLKmode,
3573				   plus_constant (args_addr,
3574						  skip + INTVAL (args_so_far)));
3575	  else
3576	    temp = memory_address (BLKmode,
3577				   plus_constant (gen_rtx_PLUS (Pmode,
3578								args_addr,
3579								args_so_far),
3580						  skip));
3581
3582	  if (!ACCUMULATE_OUTGOING_ARGS)
3583	    {
3584	      /* If the source is referenced relative to the stack pointer,
3585		 copy it to another register to stabilize it.  We do not need
3586		 to do this if we know that we won't be changing sp.  */
3587
3588	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3589		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3590		temp = copy_to_reg (temp);
3591	    }
3592
3593	  target = gen_rtx_MEM (BLKmode, temp);
3594
3595	  if (type != 0)
3596	    {
3597	      set_mem_attributes (target, type, 1);
3598	      /* Function incoming arguments may overlap with sibling call
3599		 outgoing arguments and we cannot allow reordering of reads
3600		 from function arguments with stores to outgoing arguments
3601		 of sibling calls.  */
3602	      set_mem_alias_set (target, 0);
3603	    }
3604
3605	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3606	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3607	  set_mem_align (target, align);
3608
3609	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3610	}
3611    }
3612  else if (partial > 0)
3613    {
3614      /* Scalar partly in registers.  */
3615
3616      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3617      int i;
3618      int not_stack;
3619      /* # words of start of argument
3620	 that we must make space for but need not store.  */
3621      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3622      int args_offset = INTVAL (args_so_far);
3623      int skip;
3624
3625      /* Push padding now if padding above and stack grows down,
3626	 or if padding below and stack grows up.
3627	 But if space already allocated, this has already been done.  */
3628      if (extra && args_addr == 0
3629	  && where_pad != none && where_pad != stack_direction)
3630	anti_adjust_stack (GEN_INT (extra));
3631
3632      /* If we make space by pushing it, we might as well push
3633	 the real data.  Otherwise, we can leave OFFSET nonzero
3634	 and leave the space uninitialized.  */
3635      if (args_addr == 0)
3636	offset = 0;
3637
3638      /* Now NOT_STACK gets the number of words that we don't need to
3639	 allocate on the stack.  */
3640      not_stack = partial - offset;
3641
3642      /* If the partial register-part of the arg counts in its stack size,
3643	 skip the part of stack space corresponding to the registers.
3644	 Otherwise, start copying to the beginning of the stack space,
3645	 by setting SKIP to 0.  */
3646      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3647
3648      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3649	x = validize_mem (force_const_mem (mode, x));
3650
3651      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3652	 SUBREGs of such registers are not allowed.  */
3653      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3654	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3655	x = copy_to_reg (x);
3656
3657      /* Loop over all the words allocated on the stack for this arg.  */
3658      /* We can do it by words, because any scalar bigger than a word
3659	 has a size a multiple of a word.  */
3660#ifndef PUSH_ARGS_REVERSED
3661      for (i = not_stack; i < size; i++)
3662#else
3663      for (i = size - 1; i >= not_stack; i--)
3664#endif
3665	if (i >= not_stack + offset)
3666	  emit_push_insn (operand_subword_force (x, i, mode),
3667			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3668			  0, args_addr,
3669			  GEN_INT (args_offset + ((i - not_stack + skip)
3670						  * UNITS_PER_WORD)),
3671			  reg_parm_stack_space, alignment_pad);
3672    }
3673  else
3674    {
3675      rtx addr;
3676      rtx dest;
3677
3678      /* Push padding now if padding above and stack grows down,
3679	 or if padding below and stack grows up.
3680	 But if space already allocated, this has already been done.  */
3681      if (extra && args_addr == 0
3682	  && where_pad != none && where_pad != stack_direction)
3683	anti_adjust_stack (GEN_INT (extra));
3684
3685#ifdef PUSH_ROUNDING
3686      if (args_addr == 0 && PUSH_ARGS)
3687	emit_single_push_insn (mode, x, type);
3688      else
3689#endif
3690	{
3691	  if (GET_CODE (args_so_far) == CONST_INT)
3692	    addr
3693	      = memory_address (mode,
3694				plus_constant (args_addr,
3695					       INTVAL (args_so_far)));
3696	  else
3697	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3698						       args_so_far));
3699	  dest = gen_rtx_MEM (mode, addr);
3700	  if (type != 0)
3701	    {
3702	      set_mem_attributes (dest, type, 1);
3703	      /* Function incoming arguments may overlap with sibling call
3704		 outgoing arguments and we cannot allow reordering of reads
3705		 from function arguments with stores to outgoing arguments
3706		 of sibling calls.  */
3707	      set_mem_alias_set (dest, 0);
3708	    }
3709
3710	  emit_move_insn (dest, x);
3711	}
3712    }
3713
3714  /* If part should go in registers, copy that part
3715     into the appropriate registers.  Do this now, at the end,
3716     since mem-to-mem copies above may do function calls.  */
3717  if (partial > 0 && reg != 0)
3718    {
3719      /* Handle calls that pass values in multiple non-contiguous locations.
3720	 The Irix 6 ABI has examples of this.  */
3721      if (GET_CODE (reg) == PARALLEL)
3722	emit_group_load (reg, x, type, -1);
3723      else
3724	move_block_to_reg (REGNO (reg), x, partial, mode);
3725    }
3726
3727  if (extra && args_addr == 0 && where_pad == stack_direction)
3728    anti_adjust_stack (GEN_INT (extra));
3729
3730  if (alignment_pad && args_addr == 0)
3731    anti_adjust_stack (alignment_pad);
3732}
3733
3734/* Return X if X can be used as a subtarget in a sequence of arithmetic
3735   operations.  */
3736
3737static rtx
3738get_subtarget (rtx x)
3739{
3740  return ((x == 0
3741	   /* Only registers can be subtargets.  */
3742	   || GET_CODE (x) != REG
3743	   /* If the register is readonly, it can't be set more than once.  */
3744	   || RTX_UNCHANGING_P (x)
3745	   /* Don't use hard regs to avoid extending their life.  */
3746	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3747	   /* Avoid subtargets inside loops,
3748	      since they hide some invariant expressions.  */
3749	   || preserve_subexpressions_p ())
3750	  ? 0 : x);
3751}
3752
3753/* Expand an assignment that stores the value of FROM into TO.
3754   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3755   (This may contain a QUEUED rtx;
3756   if the value is constant, this rtx is a constant.)
3757   Otherwise, the returned value is NULL_RTX.  */
3758
3759rtx
3760expand_assignment (tree to, tree from, int want_value)
3761{
3762  rtx to_rtx = 0;
3763  rtx result;
3764
3765  /* Don't crash if the lhs of the assignment was erroneous.  */
3766
3767  if (TREE_CODE (to) == ERROR_MARK)
3768    {
3769      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3770      return want_value ? result : NULL_RTX;
3771    }
3772
3773  /* Assignment of a structure component needs special treatment
3774     if the structure component's rtx is not simply a MEM.
3775     Assignment of an array element at a constant index, and assignment of
3776     an array element in an unaligned packed structure field, has the same
3777     problem.  */
3778
3779  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3780      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3781      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3782    {
3783      enum machine_mode mode1;
3784      HOST_WIDE_INT bitsize, bitpos;
3785      rtx orig_to_rtx;
3786      tree offset;
3787      int unsignedp;
3788      int volatilep = 0;
3789      tree tem;
3790
3791      push_temp_slots ();
3792      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3793				 &unsignedp, &volatilep);
3794
3795      /* If we are going to use store_bit_field and extract_bit_field,
3796	 make sure to_rtx will be safe for multiple use.  */
3797
3798      if (mode1 == VOIDmode && want_value)
3799	tem = stabilize_reference (tem);
3800
3801      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3802
3803      if (offset != 0)
3804	{
3805	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3806
3807	  if (GET_CODE (to_rtx) != MEM)
3808	    abort ();
3809
3810#ifdef POINTERS_EXTEND_UNSIGNED
3811	  if (GET_MODE (offset_rtx) != Pmode)
3812	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3813#else
3814	  if (GET_MODE (offset_rtx) != ptr_mode)
3815	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3816#endif
3817
3818	  /* A constant address in TO_RTX can have VOIDmode, we must not try
3819	     to call force_reg for that case.  Avoid that case.  */
3820	  if (GET_CODE (to_rtx) == MEM
3821	      && GET_MODE (to_rtx) == BLKmode
3822	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3823	      && bitsize > 0
3824	      && (bitpos % bitsize) == 0
3825	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3826	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3827	    {
3828	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3829	      bitpos = 0;
3830	    }
3831
3832	  to_rtx = offset_address (to_rtx, offset_rtx,
3833				   highest_pow2_factor_for_target (to,
3834				   				   offset));
3835	}
3836
3837      if (GET_CODE (to_rtx) == MEM)
3838	{
3839	  /* If the field is at offset zero, we could have been given the
3840	     DECL_RTX of the parent struct.  Don't munge it.  */
3841	  to_rtx = shallow_copy_rtx (to_rtx);
3842
3843	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3844	}
3845
3846      /* Deal with volatile and readonly fields.  The former is only done
3847	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
3848      if (volatilep && GET_CODE (to_rtx) == MEM)
3849	{
3850	  if (to_rtx == orig_to_rtx)
3851	    to_rtx = copy_rtx (to_rtx);
3852	  MEM_VOLATILE_P (to_rtx) = 1;
3853	}
3854
3855      if (TREE_CODE (to) == COMPONENT_REF
3856	  && TREE_READONLY (TREE_OPERAND (to, 1))
3857	  /* We can't assert that a MEM won't be set more than once
3858	     if the component is not addressable because another
3859	     non-addressable component may be referenced by the same MEM.  */
3860	  && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3861	{
3862	  if (to_rtx == orig_to_rtx)
3863	    to_rtx = copy_rtx (to_rtx);
3864	  RTX_UNCHANGING_P (to_rtx) = 1;
3865	}
3866
3867      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3868	{
3869	  if (to_rtx == orig_to_rtx)
3870	    to_rtx = copy_rtx (to_rtx);
3871	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3872	}
3873
3874      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3875			    (want_value
3876			     /* Spurious cast for HPUX compiler.  */
3877			     ? ((enum machine_mode)
3878				TYPE_MODE (TREE_TYPE (to)))
3879			     : VOIDmode),
3880			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
3881
3882      preserve_temp_slots (result);
3883      free_temp_slots ();
3884      pop_temp_slots ();
3885
3886      /* If the value is meaningful, convert RESULT to the proper mode.
3887	 Otherwise, return nothing.  */
3888      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3889					  TYPE_MODE (TREE_TYPE (from)),
3890					  result,
3891					  TREE_UNSIGNED (TREE_TYPE (to)))
3892	      : NULL_RTX);
3893    }
3894
3895  /* If the rhs is a function call and its value is not an aggregate,
3896     call the function before we start to compute the lhs.
3897     This is needed for correct code for cases such as
3898     val = setjmp (buf) on machines where reference to val
3899     requires loading up part of an address in a separate insn.
3900
3901     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3902     since it might be a promoted variable where the zero- or sign- extension
3903     needs to be done.  Handling this in the normal way is safe because no
3904     computation is done before the call.  */
3905  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3906      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3907      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3908	    && GET_CODE (DECL_RTL (to)) == REG))
3909    {
3910      rtx value;
3911
3912      push_temp_slots ();
3913      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3914      if (to_rtx == 0)
3915	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3916
3917      /* Handle calls that return values in multiple non-contiguous locations.
3918	 The Irix 6 ABI has examples of this.  */
3919      if (GET_CODE (to_rtx) == PARALLEL)
3920	emit_group_load (to_rtx, value, TREE_TYPE (from),
3921			 int_size_in_bytes (TREE_TYPE (from)));
3922      else if (GET_MODE (to_rtx) == BLKmode)
3923	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3924      else
3925	{
3926	  if (POINTER_TYPE_P (TREE_TYPE (to)))
3927	    value = convert_memory_address (GET_MODE (to_rtx), value);
3928	  emit_move_insn (to_rtx, value);
3929	}
3930      preserve_temp_slots (to_rtx);
3931      free_temp_slots ();
3932      pop_temp_slots ();
3933      return want_value ? to_rtx : NULL_RTX;
3934    }
3935
3936  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3937     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3938
3939  if (to_rtx == 0)
3940    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3941
3942  /* Don't move directly into a return register.  */
3943  if (TREE_CODE (to) == RESULT_DECL
3944      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3945    {
3946      rtx temp;
3947
3948      push_temp_slots ();
3949      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3950
3951      if (GET_CODE (to_rtx) == PARALLEL)
3952	emit_group_load (to_rtx, temp, TREE_TYPE (from),
3953			 int_size_in_bytes (TREE_TYPE (from)));
3954      else
3955	emit_move_insn (to_rtx, temp);
3956
3957      preserve_temp_slots (to_rtx);
3958      free_temp_slots ();
3959      pop_temp_slots ();
3960      return want_value ? to_rtx : NULL_RTX;
3961    }
3962
3963  /* In case we are returning the contents of an object which overlaps
3964     the place the value is being stored, use a safe function when copying
3965     a value through a pointer into a structure value return block.  */
3966  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3967      && current_function_returns_struct
3968      && !current_function_returns_pcc_struct)
3969    {
3970      rtx from_rtx, size;
3971
3972      push_temp_slots ();
3973      size = expr_size (from);
3974      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3975
3976      if (TARGET_MEM_FUNCTIONS)
3977	emit_library_call (memmove_libfunc, LCT_NORMAL,
3978			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3979			   XEXP (from_rtx, 0), Pmode,
3980			   convert_to_mode (TYPE_MODE (sizetype),
3981					    size, TREE_UNSIGNED (sizetype)),
3982			   TYPE_MODE (sizetype));
3983      else
3984        emit_library_call (bcopy_libfunc, LCT_NORMAL,
3985			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3986			   XEXP (to_rtx, 0), Pmode,
3987			   convert_to_mode (TYPE_MODE (integer_type_node),
3988					    size,
3989					    TREE_UNSIGNED (integer_type_node)),
3990			   TYPE_MODE (integer_type_node));
3991
3992      preserve_temp_slots (to_rtx);
3993      free_temp_slots ();
3994      pop_temp_slots ();
3995      return want_value ? to_rtx : NULL_RTX;
3996    }
3997
3998  /* Compute FROM and store the value in the rtx we got.  */
3999
4000  push_temp_slots ();
4001  result = store_expr (from, to_rtx, want_value);
4002  preserve_temp_slots (result);
4003  free_temp_slots ();
4004  pop_temp_slots ();
4005  return want_value ? result : NULL_RTX;
4006}
4007
4008/* Generate code for computing expression EXP,
4009   and storing the value into TARGET.
4010   TARGET may contain a QUEUED rtx.
4011
4012   If WANT_VALUE & 1 is nonzero, return a copy of the value
4013   not in TARGET, so that we can be sure to use the proper
4014   value in a containing expression even if TARGET has something
4015   else stored in it.  If possible, we copy the value through a pseudo
4016   and return that pseudo.  Or, if the value is constant, we try to
4017   return the constant.  In some cases, we return a pseudo
4018   copied *from* TARGET.
4019
4020   If the mode is BLKmode then we may return TARGET itself.
4021   It turns out that in BLKmode it doesn't cause a problem.
4022   because C has no operators that could combine two different
4023   assignments into the same BLKmode object with different values
4024   with no sequence point.  Will other languages need this to
4025   be more thorough?
4026
4027   If WANT_VALUE & 1 is 0, we return NULL, to make sure
4028   to catch quickly any cases where the caller uses the value
4029   and fails to set WANT_VALUE.
4030
4031   If WANT_VALUE & 2 is set, this is a store into a call param on the
4032   stack, and block moves may need to be treated specially.  */
4033
4034rtx
4035store_expr (tree exp, rtx target, int want_value)
4036{
4037  rtx temp;
4038  rtx alt_rtl = NULL_RTX;
4039  rtx mark = mark_queue ();
4040  int dont_return_target = 0;
4041  int dont_store_target = 0;
4042
4043  if (VOID_TYPE_P (TREE_TYPE (exp)))
4044    {
4045      /* C++ can generate ?: expressions with a throw expression in one
4046	 branch and an rvalue in the other. Here, we resolve attempts to
4047	 store the throw expression's nonexistent result.  */
4048      if (want_value)
4049	abort ();
4050      expand_expr (exp, const0_rtx, VOIDmode, 0);
4051      return NULL_RTX;
4052    }
4053  if (TREE_CODE (exp) == COMPOUND_EXPR)
4054    {
4055      /* Perform first part of compound expression, then assign from second
4056	 part.  */
4057      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4058		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4059      emit_queue ();
4060      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4061    }
4062  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4063    {
4064      /* For conditional expression, get safe form of the target.  Then
4065	 test the condition, doing the appropriate assignment on either
4066	 side.  This avoids the creation of unnecessary temporaries.
4067	 For non-BLKmode, it is more efficient not to do this.  */
4068
4069      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4070
4071      emit_queue ();
4072      target = protect_from_queue (target, 1);
4073
4074      do_pending_stack_adjust ();
4075      NO_DEFER_POP;
4076      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4077      start_cleanup_deferral ();
4078      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4079      end_cleanup_deferral ();
4080      emit_queue ();
4081      emit_jump_insn (gen_jump (lab2));
4082      emit_barrier ();
4083      emit_label (lab1);
4084      start_cleanup_deferral ();
4085      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4086      end_cleanup_deferral ();
4087      emit_queue ();
4088      emit_label (lab2);
4089      OK_DEFER_POP;
4090
4091      return want_value & 1 ? target : NULL_RTX;
4092    }
4093  else if (queued_subexp_p (target))
4094    /* If target contains a postincrement, let's not risk
4095       using it as the place to generate the rhs.  */
4096    {
4097      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4098	{
4099	  /* Expand EXP into a new pseudo.  */
4100	  temp = gen_reg_rtx (GET_MODE (target));
4101	  temp = expand_expr (exp, temp, GET_MODE (target),
4102			      (want_value & 2
4103			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4104	}
4105      else
4106	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4107			    (want_value & 2
4108			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4109
4110      /* If target is volatile, ANSI requires accessing the value
4111	 *from* the target, if it is accessed.  So make that happen.
4112	 In no case return the target itself.  */
4113      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4114	dont_return_target = 1;
4115    }
4116  else if ((want_value & 1) != 0
4117	   && GET_CODE (target) == MEM
4118	   && ! MEM_VOLATILE_P (target)
4119	   && GET_MODE (target) != BLKmode)
4120    /* If target is in memory and caller wants value in a register instead,
4121       arrange that.  Pass TARGET as target for expand_expr so that,
4122       if EXP is another assignment, WANT_VALUE will be nonzero for it.
4123       We know expand_expr will not use the target in that case.
4124       Don't do this if TARGET is volatile because we are supposed
4125       to write it and then read it.  */
4126    {
4127      temp = expand_expr (exp, target, GET_MODE (target),
4128			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4129      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4130	{
4131	  /* If TEMP is already in the desired TARGET, only copy it from
4132	     memory and don't store it there again.  */
4133	  if (temp == target
4134	      || (rtx_equal_p (temp, target)
4135		  && ! side_effects_p (temp) && ! side_effects_p (target)))
4136	    dont_store_target = 1;
4137	  temp = copy_to_reg (temp);
4138	}
4139      dont_return_target = 1;
4140    }
4141  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4142    /* If this is a scalar in a register that is stored in a wider mode
4143       than the declared mode, compute the result into its declared mode
4144       and then convert to the wider mode.  Our value is the computed
4145       expression.  */
4146    {
4147      rtx inner_target = 0;
4148
4149      /* If we don't want a value, we can do the conversion inside EXP,
4150	 which will often result in some optimizations.  Do the conversion
4151	 in two steps: first change the signedness, if needed, then
4152	 the extend.  But don't do this if the type of EXP is a subtype
4153	 of something else since then the conversion might involve
4154	 more than just converting modes.  */
4155      if ((want_value & 1) == 0
4156	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4157	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4158	{
4159	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4160	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4161	    exp = convert
4162	      ((*lang_hooks.types.signed_or_unsigned_type)
4163	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4164
4165	  exp = convert ((*lang_hooks.types.type_for_mode)
4166			 (GET_MODE (SUBREG_REG (target)),
4167			  SUBREG_PROMOTED_UNSIGNED_P (target)),
4168			 exp);
4169
4170	  inner_target = SUBREG_REG (target);
4171	}
4172
4173      temp = expand_expr (exp, inner_target, VOIDmode,
4174			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4175
4176      /* If TEMP is a MEM and we want a result value, make the access
4177	 now so it gets done only once.  Strictly speaking, this is
4178	 only necessary if the MEM is volatile, or if the address
4179	 overlaps TARGET.  But not performing the load twice also
4180	 reduces the amount of rtl we generate and then have to CSE.  */
4181      if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4182	temp = copy_to_reg (temp);
4183
4184      /* If TEMP is a VOIDmode constant, use convert_modes to make
4185	 sure that we properly convert it.  */
4186      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4187	{
4188	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4189				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4190	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4191			        GET_MODE (target), temp,
4192			        SUBREG_PROMOTED_UNSIGNED_P (target));
4193	}
4194
4195      convert_move (SUBREG_REG (target), temp,
4196		    SUBREG_PROMOTED_UNSIGNED_P (target));
4197
4198      /* If we promoted a constant, change the mode back down to match
4199	 target.  Otherwise, the caller might get confused by a result whose
4200	 mode is larger than expected.  */
4201
4202      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4203	{
4204	  if (GET_MODE (temp) != VOIDmode)
4205	    {
4206	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4207	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4208	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
4209		SUBREG_PROMOTED_UNSIGNED_P (target));
4210	    }
4211	  else
4212	    temp = convert_modes (GET_MODE (target),
4213				  GET_MODE (SUBREG_REG (target)),
4214				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4215	}
4216
4217      return want_value & 1 ? temp : NULL_RTX;
4218    }
4219  else
4220    {
4221      temp = expand_expr_real (exp, target, GET_MODE (target),
4222			       (want_value & 2
4223				? EXPAND_STACK_PARM : EXPAND_NORMAL),
4224			       &alt_rtl);
4225      /* Return TARGET if it's a specified hardware register.
4226	 If TARGET is a volatile mem ref, either return TARGET
4227	 or return a reg copied *from* TARGET; ANSI requires this.
4228
4229	 Otherwise, if TEMP is not TARGET, return TEMP
4230	 if it is constant (for efficiency),
4231	 or if we really want the correct value.  */
4232      if (!(target && GET_CODE (target) == REG
4233	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4234	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4235	  && ! rtx_equal_p (temp, target)
4236	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
4237	dont_return_target = 1;
4238    }
4239
4240  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4241     the same as that of TARGET, adjust the constant.  This is needed, for
4242     example, in case it is a CONST_DOUBLE and we want only a word-sized
4243     value.  */
4244  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4245      && TREE_CODE (exp) != ERROR_MARK
4246      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4247    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4248			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4249
4250  /* If value was not generated in the target, store it there.
4251     Convert the value to TARGET's type first if necessary and emit the
4252     pending incrementations that have been queued when expanding EXP.
4253     Note that we cannot emit the whole queue blindly because this will
4254     effectively disable the POST_INC optimization later.
4255
4256     If TEMP and TARGET compare equal according to rtx_equal_p, but
4257     one or both of them are volatile memory refs, we have to distinguish
4258     two cases:
4259     - expand_expr has used TARGET.  In this case, we must not generate
4260       another copy.  This can be detected by TARGET being equal according
4261       to == .
4262     - expand_expr has not used TARGET - that means that the source just
4263       happens to have the same RTX form.  Since temp will have been created
4264       by expand_expr, it will compare unequal according to == .
4265       We must generate a copy in this case, to reach the correct number
4266       of volatile memory references.  */
4267
4268  if ((! rtx_equal_p (temp, target)
4269       || (temp != target && (side_effects_p (temp)
4270			      || side_effects_p (target))))
4271      && TREE_CODE (exp) != ERROR_MARK
4272      && ! dont_store_target
4273      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4274	 but TARGET is not valid memory reference, TEMP will differ
4275	 from TARGET although it is really the same location.  */
4276      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4277      /* If there's nothing to copy, don't bother.  Don't call expr_size
4278	 unless necessary, because some front-ends (C++) expr_size-hook
4279	 aborts on objects that are not supposed to be bit-copied or
4280	 bit-initialized.  */
4281      && expr_size (exp) != const0_rtx)
4282    {
4283      emit_insns_enqueued_after_mark (mark);
4284      target = protect_from_queue (target, 1);
4285      temp = protect_from_queue (temp, 0);
4286      if (GET_MODE (temp) != GET_MODE (target)
4287	  && GET_MODE (temp) != VOIDmode)
4288	{
4289	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4290	  if (dont_return_target)
4291	    {
4292	      /* In this case, we will return TEMP,
4293		 so make sure it has the proper mode.
4294		 But don't forget to store the value into TARGET.  */
4295	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4296	      emit_move_insn (target, temp);
4297	    }
4298	  else
4299	    convert_move (target, temp, unsignedp);
4300	}
4301
4302      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4303	{
4304	  /* Handle copying a string constant into an array.  The string
4305	     constant may be shorter than the array.  So copy just the string's
4306	     actual length, and clear the rest.  First get the size of the data
4307	     type of the string, which is actually the size of the target.  */
4308	  rtx size = expr_size (exp);
4309
4310	  if (GET_CODE (size) == CONST_INT
4311	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4312	    emit_block_move (target, temp, size,
4313			     (want_value & 2
4314			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4315	  else
4316	    {
4317	      /* Compute the size of the data to copy from the string.  */
4318	      tree copy_size
4319		= size_binop (MIN_EXPR,
4320			      make_tree (sizetype, size),
4321			      size_int (TREE_STRING_LENGTH (exp)));
4322	      rtx copy_size_rtx
4323		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4324			       (want_value & 2
4325				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4326	      rtx label = 0;
4327
4328	      /* Copy that much.  */
4329	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4330					       TREE_UNSIGNED (sizetype));
4331	      emit_block_move (target, temp, copy_size_rtx,
4332			       (want_value & 2
4333				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4334
4335	      /* Figure out how much is left in TARGET that we have to clear.
4336		 Do all calculations in ptr_mode.  */
4337	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4338		{
4339		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4340		  target = adjust_address (target, BLKmode,
4341					   INTVAL (copy_size_rtx));
4342		}
4343	      else
4344		{
4345		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4346				       copy_size_rtx, NULL_RTX, 0,
4347				       OPTAB_LIB_WIDEN);
4348
4349#ifdef POINTERS_EXTEND_UNSIGNED
4350		  if (GET_MODE (copy_size_rtx) != Pmode)
4351		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4352						     TREE_UNSIGNED (sizetype));
4353#endif
4354
4355		  target = offset_address (target, copy_size_rtx,
4356					   highest_pow2_factor (copy_size));
4357		  label = gen_label_rtx ();
4358		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4359					   GET_MODE (size), 0, label);
4360		}
4361
4362	      if (size != const0_rtx)
4363		clear_storage (target, size);
4364
4365	      if (label)
4366		emit_label (label);
4367	    }
4368	}
4369      /* Handle calls that return values in multiple non-contiguous locations.
4370	 The Irix 6 ABI has examples of this.  */
4371      else if (GET_CODE (target) == PARALLEL)
4372	emit_group_load (target, temp, TREE_TYPE (exp),
4373			 int_size_in_bytes (TREE_TYPE (exp)));
4374      else if (GET_MODE (temp) == BLKmode)
4375	emit_block_move (target, temp, expr_size (exp),
4376			 (want_value & 2
4377			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4378      else
4379	emit_move_insn (target, temp);
4380    }
4381
4382  /* If we don't want a value, return NULL_RTX.  */
4383  if ((want_value & 1) == 0)
4384    return NULL_RTX;
4385
4386  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4387     ??? The latter test doesn't seem to make sense.  */
4388  else if (dont_return_target && GET_CODE (temp) != MEM)
4389    return temp;
4390
4391  /* Return TARGET itself if it is a hard register.  */
4392  else if ((want_value & 1) != 0
4393	   && GET_MODE (target) != BLKmode
4394	   && ! (GET_CODE (target) == REG
4395		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4396    return copy_to_reg (target);
4397
4398  else
4399    return target;
4400}
4401
4402/* Return 1 if EXP just contains zeros.  FIXME merge with initializer_zerop.  */
4403
4404static int
4405is_zeros_p (tree exp)
4406{
4407  tree elt;
4408
4409  switch (TREE_CODE (exp))
4410    {
4411    case CONVERT_EXPR:
4412    case NOP_EXPR:
4413    case NON_LVALUE_EXPR:
4414    case VIEW_CONVERT_EXPR:
4415      return is_zeros_p (TREE_OPERAND (exp, 0));
4416
4417    case INTEGER_CST:
4418      return integer_zerop (exp);
4419
4420    case COMPLEX_CST:
4421      return
4422	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4423
4424    case REAL_CST:
4425      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4426
4427    case VECTOR_CST:
4428      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4429	   elt = TREE_CHAIN (elt))
4430	if (!is_zeros_p (TREE_VALUE (elt)))
4431	  return 0;
4432
4433      return 1;
4434
4435    case CONSTRUCTOR:
4436      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4437	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4438      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4439	if (! is_zeros_p (TREE_VALUE (elt)))
4440	  return 0;
4441
4442      return 1;
4443
4444    default:
4445      return 0;
4446    }
4447}
4448
4449/* Return 1 if EXP contains mostly (3/4)  zeros.  */
4450
4451int
4452mostly_zeros_p (tree exp)
4453{
4454  if (TREE_CODE (exp) == CONSTRUCTOR)
4455    {
4456      int elts = 0, zeros = 0;
4457      tree elt = CONSTRUCTOR_ELTS (exp);
4458      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4459	{
4460	  /* If there are no ranges of true bits, it is all zero.  */
4461	  return elt == NULL_TREE;
4462	}
4463      for (; elt; elt = TREE_CHAIN (elt))
4464	{
4465	  /* We do not handle the case where the index is a RANGE_EXPR,
4466	     so the statistic will be somewhat inaccurate.
4467	     We do make a more accurate count in store_constructor itself,
4468	     so since this function is only used for nested array elements,
4469	     this should be close enough.  */
4470	  if (mostly_zeros_p (TREE_VALUE (elt)))
4471	    zeros++;
4472	  elts++;
4473	}
4474
4475      return 4 * zeros >= 3 * elts;
4476    }
4477
4478  return is_zeros_p (exp);
4479}
4480
4481/* Helper function for store_constructor.
4482   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4483   TYPE is the type of the CONSTRUCTOR, not the element type.
4484   CLEARED is as for store_constructor.
4485   ALIAS_SET is the alias set to use for any stores.
4486
4487   This provides a recursive shortcut back to store_constructor when it isn't
4488   necessary to go through store_field.  This is so that we can pass through
4489   the cleared field to let store_constructor know that we may not have to
4490   clear a substructure if the outer structure has already been cleared.  */
4491
4492static void
4493store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4494			 HOST_WIDE_INT bitpos, enum machine_mode mode,
4495			 tree exp, tree type, int cleared, int alias_set)
4496{
4497  if (TREE_CODE (exp) == CONSTRUCTOR
4498      && bitpos % BITS_PER_UNIT == 0
4499      /* If we have a nonzero bitpos for a register target, then we just
4500	 let store_field do the bitfield handling.  This is unlikely to
4501	 generate unnecessary clear instructions anyways.  */
4502      && (bitpos == 0 || GET_CODE (target) == MEM))
4503    {
4504      if (GET_CODE (target) == MEM)
4505	target
4506	  = adjust_address (target,
4507			    GET_MODE (target) == BLKmode
4508			    || 0 != (bitpos
4509				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4510			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4511
4512
4513      /* Update the alias set, if required.  */
4514      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4515	  && MEM_ALIAS_SET (target) != 0)
4516	{
4517	  target = copy_rtx (target);
4518	  set_mem_alias_set (target, alias_set);
4519	}
4520
4521      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4522    }
4523  else
4524    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4525		 alias_set);
4526}
4527
4528/* Store the value of constructor EXP into the rtx TARGET.
4529   TARGET is either a REG or a MEM; we know it cannot conflict, since
4530   safe_from_p has been called.
4531   CLEARED is true if TARGET is known to have been zero'd.
4532   SIZE is the number of bytes of TARGET we are allowed to modify: this
4533   may not be the same as the size of EXP if we are assigning to a field
4534   which has been packed to exclude padding bits.  */
4535
4536static void
4537store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4538{
4539  tree type = TREE_TYPE (exp);
4540#ifdef WORD_REGISTER_OPERATIONS
4541  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4542#endif
4543
4544  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4545      || TREE_CODE (type) == QUAL_UNION_TYPE)
4546    {
4547      tree elt;
4548
4549      /* If size is zero or the target is already cleared, do nothing.  */
4550      if (size == 0 || cleared)
4551	cleared = 1;
4552      /* We either clear the aggregate or indicate the value is dead.  */
4553      else if ((TREE_CODE (type) == UNION_TYPE
4554		|| TREE_CODE (type) == QUAL_UNION_TYPE)
4555	       && ! CONSTRUCTOR_ELTS (exp))
4556	/* If the constructor is empty, clear the union.  */
4557	{
4558	  clear_storage (target, expr_size (exp));
4559	  cleared = 1;
4560	}
4561
4562      /* If we are building a static constructor into a register,
4563	 set the initial value as zero so we can fold the value into
4564	 a constant.  But if more than one register is involved,
4565	 this probably loses.  */
4566      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4567	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4568	{
4569	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4570	  cleared = 1;
4571	}
4572
4573      /* If the constructor has fewer fields than the structure
4574	 or if we are initializing the structure to mostly zeros,
4575	 clear the whole structure first.  Don't do this if TARGET is a
4576	 register whose mode size isn't equal to SIZE since clear_storage
4577	 can't handle this case.  */
4578      else if (size > 0
4579	       && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4580		   || mostly_zeros_p (exp))
4581	       && (GET_CODE (target) != REG
4582		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4583		       == size)))
4584	{
4585	  rtx xtarget = target;
4586
4587	  if (RTX_UNCHANGING_P (target))
4588	    {
4589	      xtarget = copy_rtx (target);
4590	      RTX_UNCHANGING_P (xtarget) = 0;
4591	    }
4592
4593	  clear_storage (xtarget, GEN_INT (size));
4594	  cleared = 1;
4595	  if (RTX_UNCHANGING_P (target) || readonly_fields_p (type))
4596	    {
4597	      /* ??? Emit a blockage to prevent the scheduler from swapping
4598		 the memory write issued above without the /u flag and
4599		 memory writes that will be issued later with it.
4600		 Note that the clearing above cannot be simply disabled
4601		 in the unsafe cases because the C front-end relies on
4602		 it to implement the semantics of constructors for
4603		 automatic objects.  However, not all machine descriptions
4604		 define a blockage insn, so emit an ASM_INPUT to
4605		 act as one. �*/
4606	      emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4607	    }
4608	}
4609
4610      if (! cleared)
4611	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4612
4613      /* Store each element of the constructor into
4614	 the corresponding field of TARGET.  */
4615
4616      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4617	{
4618	  tree field = TREE_PURPOSE (elt);
4619	  tree value = TREE_VALUE (elt);
4620	  enum machine_mode mode;
4621	  HOST_WIDE_INT bitsize;
4622	  HOST_WIDE_INT bitpos = 0;
4623	  tree offset;
4624	  rtx to_rtx = target;
4625
4626	  /* Just ignore missing fields.
4627	     We cleared the whole structure, above,
4628	     if any fields are missing.  */
4629	  if (field == 0)
4630	    continue;
4631
4632	  if (cleared && is_zeros_p (value))
4633	    continue;
4634
4635	  if (host_integerp (DECL_SIZE (field), 1))
4636	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4637	  else
4638	    bitsize = -1;
4639
4640	  mode = DECL_MODE (field);
4641	  if (DECL_BIT_FIELD (field))
4642	    mode = VOIDmode;
4643
4644	  offset = DECL_FIELD_OFFSET (field);
4645	  if (host_integerp (offset, 0)
4646	      && host_integerp (bit_position (field), 0))
4647	    {
4648	      bitpos = int_bit_position (field);
4649	      offset = 0;
4650	    }
4651	  else
4652	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4653
4654	  if (offset)
4655	    {
4656	      rtx offset_rtx;
4657
4658	      if (CONTAINS_PLACEHOLDER_P (offset))
4659		offset = build (WITH_RECORD_EXPR, sizetype,
4660				offset, make_tree (TREE_TYPE (exp), target));
4661
4662	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4663	      if (GET_CODE (to_rtx) != MEM)
4664		abort ();
4665
4666#ifdef POINTERS_EXTEND_UNSIGNED
4667	      if (GET_MODE (offset_rtx) != Pmode)
4668		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4669#else
4670	      if (GET_MODE (offset_rtx) != ptr_mode)
4671		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4672#endif
4673
4674	      to_rtx = offset_address (to_rtx, offset_rtx,
4675				       highest_pow2_factor (offset));
4676	    }
4677
4678	  if (TREE_READONLY (field))
4679	    {
4680	      if (GET_CODE (to_rtx) == MEM)
4681		to_rtx = copy_rtx (to_rtx);
4682
4683	      RTX_UNCHANGING_P (to_rtx) = 1;
4684	    }
4685
4686#ifdef WORD_REGISTER_OPERATIONS
4687	  /* If this initializes a field that is smaller than a word, at the
4688	     start of a word, try to widen it to a full word.
4689	     This special case allows us to output C++ member function
4690	     initializations in a form that the optimizers can understand.  */
4691	  if (GET_CODE (target) == REG
4692	      && bitsize < BITS_PER_WORD
4693	      && bitpos % BITS_PER_WORD == 0
4694	      && GET_MODE_CLASS (mode) == MODE_INT
4695	      && TREE_CODE (value) == INTEGER_CST
4696	      && exp_size >= 0
4697	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4698	    {
4699	      tree type = TREE_TYPE (value);
4700
4701	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4702		{
4703		  type = (*lang_hooks.types.type_for_size)
4704		    (BITS_PER_WORD, TREE_UNSIGNED (type));
4705		  value = convert (type, value);
4706		}
4707
4708	      if (BYTES_BIG_ENDIAN)
4709		value
4710		  = fold (build (LSHIFT_EXPR, type, value,
4711				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4712	      bitsize = BITS_PER_WORD;
4713	      mode = word_mode;
4714	    }
4715#endif
4716
4717	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4718	      && DECL_NONADDRESSABLE_P (field))
4719	    {
4720	      to_rtx = copy_rtx (to_rtx);
4721	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4722	    }
4723
4724	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4725				   value, type, cleared,
4726				   get_alias_set (TREE_TYPE (field)));
4727	}
4728    }
4729  else if (TREE_CODE (type) == ARRAY_TYPE
4730	   || TREE_CODE (type) == VECTOR_TYPE)
4731    {
4732      tree elt;
4733      int i;
4734      int need_to_clear;
4735      tree domain = TYPE_DOMAIN (type);
4736      tree elttype = TREE_TYPE (type);
4737      int const_bounds_p;
4738      HOST_WIDE_INT minelt = 0;
4739      HOST_WIDE_INT maxelt = 0;
4740      int icode = 0;
4741      rtx *vector = NULL;
4742      int elt_size = 0;
4743      unsigned n_elts = 0;
4744
4745      /* Vectors are like arrays, but the domain is stored via an array
4746	 type indirectly.  */
4747      if (TREE_CODE (type) == VECTOR_TYPE)
4748	{
4749	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4750	     the same field as TYPE_DOMAIN, we are not guaranteed that
4751	     it always will.  */
4752	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4753	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4754	  if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4755	    {
4756	      enum machine_mode mode = GET_MODE (target);
4757
4758	      icode = (int) vec_init_optab->handlers[mode].insn_code;
4759	      if (icode != CODE_FOR_nothing)
4760		{
4761		  unsigned int i;
4762
4763		  elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4764		  n_elts = (GET_MODE_SIZE (mode) / elt_size);
4765		  vector = alloca (n_elts);
4766		  for (i = 0; i < n_elts; i++)
4767		    vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4768		}
4769	    }
4770	}
4771
4772      const_bounds_p = (TYPE_MIN_VALUE (domain)
4773			&& TYPE_MAX_VALUE (domain)
4774			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
4775			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
4776
4777      /* If we have constant bounds for the range of the type, get them.  */
4778      if (const_bounds_p)
4779	{
4780	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4781	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4782	}
4783
4784      /* If the constructor has fewer elements than the array,
4785         clear the whole array first.  Similarly if this is
4786         static constructor of a non-BLKmode object.  */
4787      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4788	need_to_clear = 1;
4789      else
4790	{
4791	  HOST_WIDE_INT count = 0, zero_count = 0;
4792	  need_to_clear = ! const_bounds_p;
4793
4794	  /* This loop is a more accurate version of the loop in
4795	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4796	     It is also needed to check for missing elements.  */
4797	  for (elt = CONSTRUCTOR_ELTS (exp);
4798	       elt != NULL_TREE && ! need_to_clear;
4799	       elt = TREE_CHAIN (elt))
4800	    {
4801	      tree index = TREE_PURPOSE (elt);
4802	      HOST_WIDE_INT this_node_count;
4803
4804	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4805		{
4806		  tree lo_index = TREE_OPERAND (index, 0);
4807		  tree hi_index = TREE_OPERAND (index, 1);
4808
4809		  if (! host_integerp (lo_index, 1)
4810		      || ! host_integerp (hi_index, 1))
4811		    {
4812		      need_to_clear = 1;
4813		      break;
4814		    }
4815
4816		  this_node_count = (tree_low_cst (hi_index, 1)
4817				     - tree_low_cst (lo_index, 1) + 1);
4818		}
4819	      else
4820		this_node_count = 1;
4821
4822	      count += this_node_count;
4823	      if (mostly_zeros_p (TREE_VALUE (elt)))
4824		zero_count += this_node_count;
4825	    }
4826
4827	  /* Clear the entire array first if there are any missing elements,
4828	     or if the incidence of zero elements is >= 75%.  */
4829	  if (! need_to_clear
4830	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4831	    need_to_clear = 1;
4832	}
4833
4834      if (need_to_clear && size > 0 && !vector)
4835	{
4836	  if (! cleared)
4837	    {
4838	      if (REG_P (target))
4839		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4840	      else
4841		{
4842		  rtx xtarget = target;
4843
4844		  if (RTX_UNCHANGING_P (target))
4845		    {
4846		      xtarget = copy_rtx (target);
4847		      RTX_UNCHANGING_P (xtarget) = 0;
4848		    }
4849
4850		  clear_storage (xtarget, GEN_INT (size));
4851
4852		  if (RTX_UNCHANGING_P (target))
4853		    {
4854		      /* ??? Emit a blockage to prevent the scheduler from
4855			 swapping the memory write issued above without the
4856			 /u flag and memory writes that will be issued later
4857			 with it.  */
4858		      emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4859		    }
4860		}
4861	    }
4862	  cleared = 1;
4863	}
4864      else if (REG_P (target))
4865	/* Inform later passes that the old value is dead.  */
4866	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4867
4868      /* Store each element of the constructor into
4869	 the corresponding element of TARGET, determined
4870	 by counting the elements.  */
4871      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4872	   elt;
4873	   elt = TREE_CHAIN (elt), i++)
4874	{
4875	  enum machine_mode mode;
4876	  HOST_WIDE_INT bitsize;
4877	  HOST_WIDE_INT bitpos;
4878	  int unsignedp;
4879	  tree value = TREE_VALUE (elt);
4880	  tree index = TREE_PURPOSE (elt);
4881	  rtx xtarget = target;
4882
4883	  if (cleared && is_zeros_p (value))
4884	    continue;
4885
4886	  unsignedp = TREE_UNSIGNED (elttype);
4887	  mode = TYPE_MODE (elttype);
4888	  if (mode == BLKmode)
4889	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4890		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
4891		       : -1);
4892	  else
4893	    bitsize = GET_MODE_BITSIZE (mode);
4894
4895	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4896	    {
4897	      tree lo_index = TREE_OPERAND (index, 0);
4898	      tree hi_index = TREE_OPERAND (index, 1);
4899	      rtx index_r, pos_rtx, loop_end;
4900	      struct nesting *loop;
4901	      HOST_WIDE_INT lo, hi, count;
4902	      tree position;
4903
4904	      if (vector)
4905		abort ();
4906
4907	      /* If the range is constant and "small", unroll the loop.  */
4908	      if (const_bounds_p
4909		  && host_integerp (lo_index, 0)
4910		  && host_integerp (hi_index, 0)
4911		  && (lo = tree_low_cst (lo_index, 0),
4912		      hi = tree_low_cst (hi_index, 0),
4913		      count = hi - lo + 1,
4914		      (GET_CODE (target) != MEM
4915		       || count <= 2
4916		       || (host_integerp (TYPE_SIZE (elttype), 1)
4917			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4918			       <= 40 * 8)))))
4919		{
4920		  lo -= minelt;  hi -= minelt;
4921		  for (; lo <= hi; lo++)
4922		    {
4923		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4924
4925		      if (GET_CODE (target) == MEM
4926			  && !MEM_KEEP_ALIAS_SET_P (target)
4927			  && TREE_CODE (type) == ARRAY_TYPE
4928			  && TYPE_NONALIASED_COMPONENT (type))
4929			{
4930			  target = copy_rtx (target);
4931			  MEM_KEEP_ALIAS_SET_P (target) = 1;
4932			}
4933
4934		      store_constructor_field
4935			(target, bitsize, bitpos, mode, value, type, cleared,
4936			 get_alias_set (elttype));
4937		    }
4938		}
4939	      else
4940		{
4941		  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4942		  loop_end = gen_label_rtx ();
4943
4944		  unsignedp = TREE_UNSIGNED (domain);
4945
4946		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4947
4948		  index_r
4949		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4950						 &unsignedp, 0));
4951		  SET_DECL_RTL (index, index_r);
4952		  if (TREE_CODE (value) == SAVE_EXPR
4953		      && SAVE_EXPR_RTL (value) == 0)
4954		    {
4955		      /* Make sure value gets expanded once before the
4956                         loop.  */
4957		      expand_expr (value, const0_rtx, VOIDmode, 0);
4958		      emit_queue ();
4959		    }
4960		  store_expr (lo_index, index_r, 0);
4961		  loop = expand_start_loop (0);
4962
4963		  /* Assign value to element index.  */
4964		  position
4965		    = convert (ssizetype,
4966			       fold (build (MINUS_EXPR, TREE_TYPE (index),
4967					    index, TYPE_MIN_VALUE (domain))));
4968		  position = size_binop (MULT_EXPR, position,
4969					 convert (ssizetype,
4970						  TYPE_SIZE_UNIT (elttype)));
4971
4972		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4973		  xtarget = offset_address (target, pos_rtx,
4974					    highest_pow2_factor (position));
4975		  xtarget = adjust_address (xtarget, mode, 0);
4976		  if (TREE_CODE (value) == CONSTRUCTOR)
4977		    store_constructor (value, xtarget, cleared,
4978				       bitsize / BITS_PER_UNIT);
4979		  else
4980		    store_expr (value, xtarget, 0);
4981
4982		  expand_exit_loop_if_false (loop,
4983					     build (LT_EXPR, integer_type_node,
4984						    index, hi_index));
4985
4986		  expand_increment (build (PREINCREMENT_EXPR,
4987					   TREE_TYPE (index),
4988					   index, integer_one_node), 0, 0);
4989		  expand_end_loop ();
4990		  emit_label (loop_end);
4991		}
4992	    }
4993	  else if ((index != 0 && ! host_integerp (index, 0))
4994		   || ! host_integerp (TYPE_SIZE (elttype), 1))
4995	    {
4996	      tree position;
4997
4998	      if (vector)
4999		abort ();
5000
5001	      if (index == 0)
5002		index = ssize_int (1);
5003
5004	      if (minelt)
5005		index = convert (ssizetype,
5006				 fold (build (MINUS_EXPR, index,
5007					      TYPE_MIN_VALUE (domain))));
5008
5009	      position = size_binop (MULT_EXPR, index,
5010				     convert (ssizetype,
5011					      TYPE_SIZE_UNIT (elttype)));
5012	      xtarget = offset_address (target,
5013					expand_expr (position, 0, VOIDmode, 0),
5014					highest_pow2_factor (position));
5015	      xtarget = adjust_address (xtarget, mode, 0);
5016	      store_expr (value, xtarget, 0);
5017	    }
5018	  else if (vector)
5019	    {
5020	      int pos;
5021
5022	      if (index != 0)
5023		pos = tree_low_cst (index, 0) - minelt;
5024	      else
5025		pos = i;
5026	      vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5027	    }
5028	  else
5029	    {
5030	      if (index != 0)
5031		bitpos = ((tree_low_cst (index, 0) - minelt)
5032			  * tree_low_cst (TYPE_SIZE (elttype), 1));
5033	      else
5034		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5035
5036	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5037		  && TREE_CODE (type) == ARRAY_TYPE
5038		  && TYPE_NONALIASED_COMPONENT (type))
5039		{
5040		  target = copy_rtx (target);
5041		  MEM_KEEP_ALIAS_SET_P (target) = 1;
5042		}
5043	      store_constructor_field (target, bitsize, bitpos, mode, value,
5044				       type, cleared, get_alias_set (elttype));
5045	    }
5046	}
5047      if (vector)
5048	{
5049	  emit_insn (GEN_FCN (icode) (target,
5050				      gen_rtx_PARALLEL (GET_MODE (target),
5051						        gen_rtvec_v (n_elts, vector))));
5052	}
5053    }
5054
5055  /* Set constructor assignments.  */
5056  else if (TREE_CODE (type) == SET_TYPE)
5057    {
5058      tree elt = CONSTRUCTOR_ELTS (exp);
5059      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5060      tree domain = TYPE_DOMAIN (type);
5061      tree domain_min, domain_max, bitlength;
5062
5063      /* The default implementation strategy is to extract the constant
5064	 parts of the constructor, use that to initialize the target,
5065	 and then "or" in whatever non-constant ranges we need in addition.
5066
5067	 If a large set is all zero or all ones, it is
5068	 probably better to set it using memset (if available) or bzero.
5069	 Also, if a large set has just a single range, it may also be
5070	 better to first clear all the first clear the set (using
5071	 bzero/memset), and set the bits we want.  */
5072
5073      /* Check for all zeros.  */
5074      if (elt == NULL_TREE && size > 0)
5075	{
5076	  if (!cleared)
5077	    clear_storage (target, GEN_INT (size));
5078	  return;
5079	}
5080
5081      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5082      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5083      bitlength = size_binop (PLUS_EXPR,
5084			      size_diffop (domain_max, domain_min),
5085			      ssize_int (1));
5086
5087      nbits = tree_low_cst (bitlength, 1);
5088
5089      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5090	 are "complicated" (more than one range), initialize (the
5091	 constant parts) by copying from a constant.  */
5092      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5093	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5094	{
5095	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5096	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5097	  char *bit_buffer = alloca (nbits);
5098	  HOST_WIDE_INT word = 0;
5099	  unsigned int bit_pos = 0;
5100	  unsigned int ibit = 0;
5101	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
5102
5103	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5104	  for (;;)
5105	    {
5106	      if (bit_buffer[ibit])
5107		{
5108		  if (BYTES_BIG_ENDIAN)
5109		    word |= (1 << (set_word_size - 1 - bit_pos));
5110		  else
5111		    word |= 1 << bit_pos;
5112		}
5113
5114	      bit_pos++;  ibit++;
5115	      if (bit_pos >= set_word_size || ibit == nbits)
5116		{
5117		  if (word != 0 || ! cleared)
5118		    {
5119		      rtx datum = gen_int_mode (word, mode);
5120		      rtx to_rtx;
5121
5122		      /* The assumption here is that it is safe to use
5123			 XEXP if the set is multi-word, but not if
5124			 it's single-word.  */
5125		      if (GET_CODE (target) == MEM)
5126			to_rtx = adjust_address (target, mode, offset);
5127		      else if (offset == 0)
5128			to_rtx = target;
5129		      else
5130			abort ();
5131		      emit_move_insn (to_rtx, datum);
5132		    }
5133
5134		  if (ibit == nbits)
5135		    break;
5136		  word = 0;
5137		  bit_pos = 0;
5138		  offset += set_word_size / BITS_PER_UNIT;
5139		}
5140	    }
5141	}
5142      else if (!cleared)
5143	/* Don't bother clearing storage if the set is all ones.  */
5144	if (TREE_CHAIN (elt) != NULL_TREE
5145	    || (TREE_PURPOSE (elt) == NULL_TREE
5146		? nbits != 1
5147		: ( ! host_integerp (TREE_VALUE (elt), 0)
5148		   || ! host_integerp (TREE_PURPOSE (elt), 0)
5149		   || (tree_low_cst (TREE_VALUE (elt), 0)
5150		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5151		       != (HOST_WIDE_INT) nbits))))
5152	  clear_storage (target, expr_size (exp));
5153
5154      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5155	{
5156	  /* Start of range of element or NULL.  */
5157	  tree startbit = TREE_PURPOSE (elt);
5158	  /* End of range of element, or element value.  */
5159	  tree endbit   = TREE_VALUE (elt);
5160	  HOST_WIDE_INT startb, endb;
5161	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5162
5163	  bitlength_rtx = expand_expr (bitlength,
5164				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5165
5166	  /* Handle non-range tuple element like [ expr ].  */
5167	  if (startbit == NULL_TREE)
5168	    {
5169	      startbit = save_expr (endbit);
5170	      endbit = startbit;
5171	    }
5172
5173	  startbit = convert (sizetype, startbit);
5174	  endbit = convert (sizetype, endbit);
5175	  if (! integer_zerop (domain_min))
5176	    {
5177	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5178	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5179	    }
5180	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5181				      EXPAND_CONST_ADDRESS);
5182	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5183				    EXPAND_CONST_ADDRESS);
5184
5185	  if (REG_P (target))
5186	    {
5187	      targetx
5188		= assign_temp
5189		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5190					  (GET_MODE (target), 0),
5191					  TYPE_QUAL_CONST)),
5192		   0, 1, 1);
5193	      emit_move_insn (targetx, target);
5194	    }
5195
5196	  else if (GET_CODE (target) == MEM)
5197	    targetx = target;
5198	  else
5199	    abort ();
5200
5201	  /* Optimization:  If startbit and endbit are constants divisible
5202	     by BITS_PER_UNIT, call memset instead.  */
5203	  if (TARGET_MEM_FUNCTIONS
5204	      && TREE_CODE (startbit) == INTEGER_CST
5205	      && TREE_CODE (endbit) == INTEGER_CST
5206	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5207	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5208	    {
5209	      emit_library_call (memset_libfunc, LCT_NORMAL,
5210				 VOIDmode, 3,
5211				 plus_constant (XEXP (targetx, 0),
5212						startb / BITS_PER_UNIT),
5213				 Pmode,
5214				 constm1_rtx, TYPE_MODE (integer_type_node),
5215				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5216				 TYPE_MODE (sizetype));
5217	    }
5218	  else
5219	    emit_library_call (setbits_libfunc, LCT_NORMAL,
5220			       VOIDmode, 4, XEXP (targetx, 0),
5221			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5222			       startbit_rtx, TYPE_MODE (sizetype),
5223			       endbit_rtx, TYPE_MODE (sizetype));
5224
5225	  if (REG_P (target))
5226	    emit_move_insn (target, targetx);
5227	}
5228    }
5229
5230  else
5231    abort ();
5232}
5233
5234/* Store the value of EXP (an expression tree)
5235   into a subfield of TARGET which has mode MODE and occupies
5236   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5237   If MODE is VOIDmode, it means that we are storing into a bit-field.
5238
5239   If VALUE_MODE is VOIDmode, return nothing in particular.
5240   UNSIGNEDP is not used in this case.
5241
5242   Otherwise, return an rtx for the value stored.  This rtx
5243   has mode VALUE_MODE if that is convenient to do.
5244   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5245
5246   TYPE is the type of the underlying object,
5247
5248   ALIAS_SET is the alias set for the destination.  This value will
5249   (in general) be different from that for TARGET, since TARGET is a
5250   reference to the containing structure.  */
5251
5252static rtx
5253store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5254	     enum machine_mode mode, tree exp, enum machine_mode value_mode,
5255	     int unsignedp, tree type, int alias_set)
5256{
5257  HOST_WIDE_INT width_mask = 0;
5258
5259  if (TREE_CODE (exp) == ERROR_MARK)
5260    return const0_rtx;
5261
5262  /* If we have nothing to store, do nothing unless the expression has
5263     side-effects.  */
5264  if (bitsize == 0)
5265    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5266  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5267    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5268
5269  /* If we are storing into an unaligned field of an aligned union that is
5270     in a register, we may have the mode of TARGET being an integer mode but
5271     MODE == BLKmode.  In that case, get an aligned object whose size and
5272     alignment are the same as TARGET and store TARGET into it (we can avoid
5273     the store if the field being stored is the entire width of TARGET).  Then
5274     call ourselves recursively to store the field into a BLKmode version of
5275     that object.  Finally, load from the object into TARGET.  This is not
5276     very efficient in general, but should only be slightly more expensive
5277     than the otherwise-required unaligned accesses.  Perhaps this can be
5278     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5279     twice, once with emit_move_insn and once via store_field.  */
5280
5281  if (mode == BLKmode
5282      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5283    {
5284      rtx object = assign_temp (type, 0, 1, 1);
5285      rtx blk_object = adjust_address (object, BLKmode, 0);
5286
5287      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5288	emit_move_insn (object, target);
5289
5290      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5291		   alias_set);
5292
5293      emit_move_insn (target, object);
5294
5295      /* We want to return the BLKmode version of the data.  */
5296      return blk_object;
5297    }
5298
5299  if (GET_CODE (target) == CONCAT)
5300    {
5301      /* We're storing into a struct containing a single __complex.  */
5302
5303      if (bitpos != 0)
5304	abort ();
5305      return store_expr (exp, target, 0);
5306    }
5307
5308  /* If the structure is in a register or if the component
5309     is a bit field, we cannot use addressing to access it.
5310     Use bit-field techniques or SUBREG to store in it.  */
5311
5312  if (mode == VOIDmode
5313      || (mode != BLKmode && ! direct_store[(int) mode]
5314	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5315	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5316      || GET_CODE (target) == REG
5317      || GET_CODE (target) == SUBREG
5318      /* If the field isn't aligned enough to store as an ordinary memref,
5319	 store it as a bit field.  */
5320      || (mode != BLKmode
5321	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5322		|| bitpos % GET_MODE_ALIGNMENT (mode))
5323	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5324	      || (bitpos % BITS_PER_UNIT != 0)))
5325      /* If the RHS and field are a constant size and the size of the
5326	 RHS isn't the same size as the bitfield, we must use bitfield
5327	 operations.  */
5328      || (bitsize >= 0
5329	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5330	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5331    {
5332      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5333
5334      /* If BITSIZE is narrower than the size of the type of EXP
5335	 we will be narrowing TEMP.  Normally, what's wanted are the
5336	 low-order bits.  However, if EXP's type is a record and this is
5337	 big-endian machine, we want the upper BITSIZE bits.  */
5338      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5339	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5340	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5341	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5342			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5343				       - bitsize),
5344			     NULL_RTX, 1);
5345
5346      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5347	 MODE.  */
5348      if (mode != VOIDmode && mode != BLKmode
5349	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5350	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5351
5352      /* If the modes of TARGET and TEMP are both BLKmode, both
5353	 must be in memory and BITPOS must be aligned on a byte
5354	 boundary.  If so, we simply do a block copy.  */
5355      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5356	{
5357	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5358	      || bitpos % BITS_PER_UNIT != 0)
5359	    abort ();
5360
5361	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5362	  emit_block_move (target, temp,
5363			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5364				    / BITS_PER_UNIT),
5365			   BLOCK_OP_NORMAL);
5366
5367	  return value_mode == VOIDmode ? const0_rtx : target;
5368	}
5369
5370      /* Store the value in the bitfield.  */
5371      store_bit_field (target, bitsize, bitpos, mode, temp,
5372		       int_size_in_bytes (type));
5373
5374      if (value_mode != VOIDmode)
5375	{
5376	  /* The caller wants an rtx for the value.
5377	     If possible, avoid refetching from the bitfield itself.  */
5378	  if (width_mask != 0
5379	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5380	    {
5381	      tree count;
5382	      enum machine_mode tmode;
5383
5384	      tmode = GET_MODE (temp);
5385	      if (tmode == VOIDmode)
5386		tmode = value_mode;
5387
5388	      if (unsignedp)
5389		return expand_and (tmode, temp,
5390				   gen_int_mode (width_mask, tmode),
5391				   NULL_RTX);
5392
5393	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5394	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5395	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5396	    }
5397
5398	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5399				    NULL_RTX, value_mode, VOIDmode,
5400				    int_size_in_bytes (type));
5401	}
5402      return const0_rtx;
5403    }
5404  else
5405    {
5406      rtx addr = XEXP (target, 0);
5407      rtx to_rtx = target;
5408
5409      /* If a value is wanted, it must be the lhs;
5410	 so make the address stable for multiple use.  */
5411
5412      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5413	  && ! CONSTANT_ADDRESS_P (addr)
5414	  /* A frame-pointer reference is already stable.  */
5415	  && ! (GET_CODE (addr) == PLUS
5416		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5417		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5418		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5419	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5420
5421      /* Now build a reference to just the desired component.  */
5422
5423      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5424
5425      if (to_rtx == target)
5426	to_rtx = copy_rtx (to_rtx);
5427
5428      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5429      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5430	set_mem_alias_set (to_rtx, alias_set);
5431
5432      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5433    }
5434}
5435
5436/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5437   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5438   codes and find the ultimate containing object, which we return.
5439
5440   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5441   bit position, and *PUNSIGNEDP to the signedness of the field.
5442   If the position of the field is variable, we store a tree
5443   giving the variable offset (in units) in *POFFSET.
5444   This offset is in addition to the bit position.
5445   If the position is not variable, we store 0 in *POFFSET.
5446
5447   If any of the extraction expressions is volatile,
5448   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5449
5450   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5451   is a mode that can be used to access the field.  In that case, *PBITSIZE
5452   is redundant.
5453
5454   If the field describes a variable-sized object, *PMODE is set to
5455   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5456   this case, but the address of the object can be found.  */
5457
5458tree
5459get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5460		     HOST_WIDE_INT *pbitpos, tree *poffset,
5461		     enum machine_mode *pmode, int *punsignedp,
5462		     int *pvolatilep)
5463{
5464  tree size_tree = 0;
5465  enum machine_mode mode = VOIDmode;
5466  tree offset = size_zero_node;
5467  tree bit_offset = bitsize_zero_node;
5468  tree placeholder_ptr = 0;
5469  tree tem;
5470
5471  /* First get the mode, signedness, and size.  We do this from just the
5472     outermost expression.  */
5473  if (TREE_CODE (exp) == COMPONENT_REF)
5474    {
5475      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5476      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5477	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5478
5479      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5480    }
5481  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5482    {
5483      size_tree = TREE_OPERAND (exp, 1);
5484      *punsignedp = TREE_UNSIGNED (exp);
5485    }
5486  else
5487    {
5488      mode = TYPE_MODE (TREE_TYPE (exp));
5489      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5490
5491      if (mode == BLKmode)
5492	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5493      else
5494	*pbitsize = GET_MODE_BITSIZE (mode);
5495    }
5496
5497  if (size_tree != 0)
5498    {
5499      if (! host_integerp (size_tree, 1))
5500	mode = BLKmode, *pbitsize = -1;
5501      else
5502	*pbitsize = tree_low_cst (size_tree, 1);
5503    }
5504
5505  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5506     and find the ultimate containing object.  */
5507  while (1)
5508    {
5509      if (TREE_CODE (exp) == BIT_FIELD_REF)
5510	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5511      else if (TREE_CODE (exp) == COMPONENT_REF)
5512	{
5513	  tree field = TREE_OPERAND (exp, 1);
5514	  tree this_offset = DECL_FIELD_OFFSET (field);
5515
5516	  /* If this field hasn't been filled in yet, don't go
5517	     past it.  This should only happen when folding expressions
5518	     made during type construction.  */
5519	  if (this_offset == 0)
5520	    break;
5521	  else if (CONTAINS_PLACEHOLDER_P (this_offset))
5522	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5523
5524	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5525	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5526				   DECL_FIELD_BIT_OFFSET (field));
5527
5528	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5529	}
5530
5531      else if (TREE_CODE (exp) == ARRAY_REF
5532	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5533	{
5534	  tree index = TREE_OPERAND (exp, 1);
5535	  tree array = TREE_OPERAND (exp, 0);
5536	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5537	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5538	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5539
5540	  /* We assume all arrays have sizes that are a multiple of a byte.
5541	     First subtract the lower bound, if any, in the type of the
5542	     index, then convert to sizetype and multiply by the size of the
5543	     array element.  */
5544	  if (low_bound != 0 && ! integer_zerop (low_bound))
5545	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5546				 index, low_bound));
5547
5548	  /* If the index has a self-referential type, pass it to a
5549	     WITH_RECORD_EXPR; if the component size is, pass our
5550	     component to one.  */
5551	  if (CONTAINS_PLACEHOLDER_P (index))
5552	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5553	  if (CONTAINS_PLACEHOLDER_P (unit_size))
5554	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5555
5556	  offset = size_binop (PLUS_EXPR, offset,
5557			       size_binop (MULT_EXPR,
5558					   convert (sizetype, index),
5559					   unit_size));
5560	}
5561
5562      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5563	{
5564	  tree new = find_placeholder (exp, &placeholder_ptr);
5565
5566	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5567	     We might have been called from tree optimization where we
5568	     haven't set up an object yet.  */
5569	  if (new == 0)
5570	    break;
5571	  else
5572	    exp = new;
5573
5574	  continue;
5575	}
5576
5577      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5578	 conversions that don't change the mode, and all view conversions
5579	 except those that need to "step up" the alignment.  */
5580      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5581	       && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5582		     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5583			    > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5584			   && STRICT_ALIGNMENT
5585			   && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5586			       < BIGGEST_ALIGNMENT)
5587			   && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5588			       || TYPE_ALIGN_OK (TREE_TYPE
5589						 (TREE_OPERAND (exp, 0))))))
5590	       && ! ((TREE_CODE (exp) == NOP_EXPR
5591		      || TREE_CODE (exp) == CONVERT_EXPR)
5592		     && (TYPE_MODE (TREE_TYPE (exp))
5593			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5594	break;
5595
5596      /* If any reference in the chain is volatile, the effect is volatile.  */
5597      if (TREE_THIS_VOLATILE (exp))
5598	*pvolatilep = 1;
5599
5600      exp = TREE_OPERAND (exp, 0);
5601    }
5602
5603  /* If OFFSET is constant, see if we can return the whole thing as a
5604     constant bit position.  Otherwise, split it up.  */
5605  if (host_integerp (offset, 0)
5606      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5607				 bitsize_unit_node))
5608      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5609      && host_integerp (tem, 0))
5610    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5611  else
5612    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5613
5614  *pmode = mode;
5615  return exp;
5616}
5617
5618/* Return 1 if T is an expression that get_inner_reference handles.  */
5619
5620int
5621handled_component_p (tree t)
5622{
5623  switch (TREE_CODE (t))
5624    {
5625    case BIT_FIELD_REF:
5626    case COMPONENT_REF:
5627    case ARRAY_REF:
5628    case ARRAY_RANGE_REF:
5629    case NON_LVALUE_EXPR:
5630    case VIEW_CONVERT_EXPR:
5631      return 1;
5632
5633    /* ??? Sure they are handled, but get_inner_reference may return
5634       a different PBITSIZE, depending upon whether the expression is
5635       wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
5636    case NOP_EXPR:
5637    case CONVERT_EXPR:
5638      return (TYPE_MODE (TREE_TYPE (t))
5639	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5640
5641    default:
5642      return 0;
5643    }
5644}
5645
5646/* Given an rtx VALUE that may contain additions and multiplications, return
5647   an equivalent value that just refers to a register, memory, or constant.
5648   This is done by generating instructions to perform the arithmetic and
5649   returning a pseudo-register containing the value.
5650
5651   The returned value may be a REG, SUBREG, MEM or constant.  */
5652
5653rtx
5654force_operand (rtx value, rtx target)
5655{
5656  rtx op1, op2;
5657  /* Use subtarget as the target for operand 0 of a binary operation.  */
5658  rtx subtarget = get_subtarget (target);
5659  enum rtx_code code = GET_CODE (value);
5660
5661  /* Check for a PIC address load.  */
5662  if ((code == PLUS || code == MINUS)
5663      && XEXP (value, 0) == pic_offset_table_rtx
5664      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5665	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5666	  || GET_CODE (XEXP (value, 1)) == CONST))
5667    {
5668      if (!subtarget)
5669	subtarget = gen_reg_rtx (GET_MODE (value));
5670      emit_move_insn (subtarget, value);
5671      return subtarget;
5672    }
5673
5674  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5675    {
5676      if (!target)
5677	target = gen_reg_rtx (GET_MODE (value));
5678      convert_move (target, force_operand (XEXP (value, 0), NULL),
5679		    code == ZERO_EXTEND);
5680      return target;
5681    }
5682
5683  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5684    {
5685      op2 = XEXP (value, 1);
5686      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5687	subtarget = 0;
5688      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5689	{
5690	  code = PLUS;
5691	  op2 = negate_rtx (GET_MODE (value), op2);
5692	}
5693
5694      /* Check for an addition with OP2 a constant integer and our first
5695         operand a PLUS of a virtual register and something else.  In that
5696         case, we want to emit the sum of the virtual register and the
5697         constant first and then add the other value.  This allows virtual
5698         register instantiation to simply modify the constant rather than
5699         creating another one around this addition.  */
5700      if (code == PLUS && GET_CODE (op2) == CONST_INT
5701	  && GET_CODE (XEXP (value, 0)) == PLUS
5702	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5703	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5704	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5705	{
5706	  rtx temp = expand_simple_binop (GET_MODE (value), code,
5707					  XEXP (XEXP (value, 0), 0), op2,
5708					  subtarget, 0, OPTAB_LIB_WIDEN);
5709	  return expand_simple_binop (GET_MODE (value), code, temp,
5710				      force_operand (XEXP (XEXP (value,
5711								 0), 1), 0),
5712				      target, 0, OPTAB_LIB_WIDEN);
5713	}
5714
5715      op1 = force_operand (XEXP (value, 0), subtarget);
5716      op2 = force_operand (op2, NULL_RTX);
5717      switch (code)
5718	{
5719	case MULT:
5720	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
5721	case DIV:
5722	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
5723	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
5724					target, 1, OPTAB_LIB_WIDEN);
5725	  else
5726	    return expand_divmod (0,
5727				  FLOAT_MODE_P (GET_MODE (value))
5728				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
5729				  GET_MODE (value), op1, op2, target, 0);
5730	  break;
5731	case MOD:
5732	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5733				target, 0);
5734	  break;
5735	case UDIV:
5736	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5737				target, 1);
5738	  break;
5739	case UMOD:
5740	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5741				target, 1);
5742	  break;
5743	case ASHIFTRT:
5744	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5745				      target, 0, OPTAB_LIB_WIDEN);
5746	  break;
5747	default:
5748	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5749				      target, 1, OPTAB_LIB_WIDEN);
5750	}
5751    }
5752  if (GET_RTX_CLASS (code) == '1')
5753    {
5754      op1 = force_operand (XEXP (value, 0), NULL_RTX);
5755      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5756    }
5757
5758#ifdef INSN_SCHEDULING
5759  /* On machines that have insn scheduling, we want all memory reference to be
5760     explicit, so we need to deal with such paradoxical SUBREGs.  */
5761  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5762      && (GET_MODE_SIZE (GET_MODE (value))
5763	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5764    value
5765      = simplify_gen_subreg (GET_MODE (value),
5766			     force_reg (GET_MODE (SUBREG_REG (value)),
5767					force_operand (SUBREG_REG (value),
5768						       NULL_RTX)),
5769			     GET_MODE (SUBREG_REG (value)),
5770			     SUBREG_BYTE (value));
5771#endif
5772
5773  return value;
5774}
5775
5776/* Subroutine of expand_expr: return nonzero iff there is no way that
5777   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5778   call is going to be used to determine whether we need a temporary
5779   for EXP, as opposed to a recursive call to this function.
5780
5781   It is always safe for this routine to return zero since it merely
5782   searches for optimization opportunities.  */
5783
5784int
5785safe_from_p (rtx x, tree exp, int top_p)
5786{
5787  rtx exp_rtl = 0;
5788  int i, nops;
5789  static tree save_expr_list;
5790
5791  if (x == 0
5792      /* If EXP has varying size, we MUST use a target since we currently
5793	 have no way of allocating temporaries of variable size
5794	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5795	 So we assume here that something at a higher level has prevented a
5796	 clash.  This is somewhat bogus, but the best we can do.  Only
5797	 do this when X is BLKmode and when we are at the top level.  */
5798      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5799	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5800	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5801	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5802	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5803	      != INTEGER_CST)
5804	  && GET_MODE (x) == BLKmode)
5805      /* If X is in the outgoing argument area, it is always safe.  */
5806      || (GET_CODE (x) == MEM
5807	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5808	      || (GET_CODE (XEXP (x, 0)) == PLUS
5809		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5810    return 1;
5811
5812  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5813     find the underlying pseudo.  */
5814  if (GET_CODE (x) == SUBREG)
5815    {
5816      x = SUBREG_REG (x);
5817      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5818	return 0;
5819    }
5820
5821  /* A SAVE_EXPR might appear many times in the expression passed to the
5822     top-level safe_from_p call, and if it has a complex subexpression,
5823     examining it multiple times could result in a combinatorial explosion.
5824     E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5825     with optimization took about 28 minutes to compile -- even though it was
5826     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
5827     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
5828     we have processed.  Note that the only test of top_p was above.  */
5829
5830  if (top_p)
5831    {
5832      int rtn;
5833      tree t;
5834
5835      save_expr_list = 0;
5836
5837      rtn = safe_from_p (x, exp, 0);
5838
5839      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5840	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5841
5842      return rtn;
5843    }
5844
5845  /* Now look at our tree code and possibly recurse.  */
5846  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5847    {
5848    case 'd':
5849      exp_rtl = DECL_RTL_IF_SET (exp);
5850      break;
5851
5852    case 'c':
5853      return 1;
5854
5855    case 'x':
5856      if (TREE_CODE (exp) == TREE_LIST)
5857	{
5858	  while (1)
5859	    {
5860	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5861		return 0;
5862	      exp = TREE_CHAIN (exp);
5863	      if (!exp)
5864		return 1;
5865	      if (TREE_CODE (exp) != TREE_LIST)
5866		return safe_from_p (x, exp, 0);
5867	    }
5868	}
5869      else if (TREE_CODE (exp) == ERROR_MARK)
5870	return 1;	/* An already-visited SAVE_EXPR? */
5871      else
5872	return 0;
5873
5874    case '2':
5875    case '<':
5876      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5877	return 0;
5878      /* Fall through.  */
5879
5880    case '1':
5881      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5882
5883    case 'e':
5884    case 'r':
5885      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5886	 the expression.  If it is set, we conflict iff we are that rtx or
5887	 both are in memory.  Otherwise, we check all operands of the
5888	 expression recursively.  */
5889
5890      switch (TREE_CODE (exp))
5891	{
5892	case ADDR_EXPR:
5893	  /* If the operand is static or we are static, we can't conflict.
5894	     Likewise if we don't conflict with the operand at all.  */
5895	  if (staticp (TREE_OPERAND (exp, 0))
5896	      || TREE_STATIC (exp)
5897	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5898	    return 1;
5899
5900	  /* Otherwise, the only way this can conflict is if we are taking
5901	     the address of a DECL a that address if part of X, which is
5902	     very rare.  */
5903	  exp = TREE_OPERAND (exp, 0);
5904	  if (DECL_P (exp))
5905	    {
5906	      if (!DECL_RTL_SET_P (exp)
5907		  || GET_CODE (DECL_RTL (exp)) != MEM)
5908		return 0;
5909	      else
5910		exp_rtl = XEXP (DECL_RTL (exp), 0);
5911	    }
5912	  break;
5913
5914	case INDIRECT_REF:
5915	  if (GET_CODE (x) == MEM
5916	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5917					get_alias_set (exp)))
5918	    return 0;
5919	  break;
5920
5921	case CALL_EXPR:
5922	  /* Assume that the call will clobber all hard registers and
5923	     all of memory.  */
5924	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5925	      || GET_CODE (x) == MEM)
5926	    return 0;
5927	  break;
5928
5929	case RTL_EXPR:
5930	  /* If a sequence exists, we would have to scan every instruction
5931	     in the sequence to see if it was safe.  This is probably not
5932	     worthwhile.  */
5933	  if (RTL_EXPR_SEQUENCE (exp))
5934	    return 0;
5935
5936	  exp_rtl = RTL_EXPR_RTL (exp);
5937	  break;
5938
5939	case WITH_CLEANUP_EXPR:
5940	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5941	  break;
5942
5943	case CLEANUP_POINT_EXPR:
5944	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5945
5946	case SAVE_EXPR:
5947	  exp_rtl = SAVE_EXPR_RTL (exp);
5948	  if (exp_rtl)
5949	    break;
5950
5951	  /* If we've already scanned this, don't do it again.  Otherwise,
5952	     show we've scanned it and record for clearing the flag if we're
5953	     going on.  */
5954	  if (TREE_PRIVATE (exp))
5955	    return 1;
5956
5957	  TREE_PRIVATE (exp) = 1;
5958	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5959	    {
5960	      TREE_PRIVATE (exp) = 0;
5961	      return 0;
5962	    }
5963
5964	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5965	  return 1;
5966
5967	case BIND_EXPR:
5968	  /* The only operand we look at is operand 1.  The rest aren't
5969	     part of the expression.  */
5970	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5971
5972	default:
5973	  break;
5974	}
5975
5976      /* If we have an rtx, we do not need to scan our operands.  */
5977      if (exp_rtl)
5978	break;
5979
5980      nops = first_rtl_op (TREE_CODE (exp));
5981      for (i = 0; i < nops; i++)
5982	if (TREE_OPERAND (exp, i) != 0
5983	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5984	  return 0;
5985
5986      /* If this is a language-specific tree code, it may require
5987	 special handling.  */
5988      if ((unsigned int) TREE_CODE (exp)
5989	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5990	  && !(*lang_hooks.safe_from_p) (x, exp))
5991	return 0;
5992    }
5993
5994  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5995     with it.  */
5996  if (exp_rtl)
5997    {
5998      if (GET_CODE (exp_rtl) == SUBREG)
5999	{
6000	  exp_rtl = SUBREG_REG (exp_rtl);
6001	  if (GET_CODE (exp_rtl) == REG
6002	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6003	    return 0;
6004	}
6005
6006      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6007	 are memory and they conflict.  */
6008      return ! (rtx_equal_p (x, exp_rtl)
6009		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6010		    && true_dependence (exp_rtl, VOIDmode, x,
6011					rtx_addr_varies_p)));
6012    }
6013
6014  /* If we reach here, it is safe.  */
6015  return 1;
6016}
6017
6018/* Subroutine of expand_expr: return rtx if EXP is a
6019   variable or parameter; else return 0.  */
6020
6021static rtx
6022var_rtx (tree exp)
6023{
6024  STRIP_NOPS (exp);
6025  switch (TREE_CODE (exp))
6026    {
6027    case PARM_DECL:
6028    case VAR_DECL:
6029      return DECL_RTL (exp);
6030    default:
6031      return 0;
6032    }
6033}
6034
6035/* Return the highest power of two that EXP is known to be a multiple of.
6036   This is used in updating alignment of MEMs in array references.  */
6037
6038static unsigned HOST_WIDE_INT
6039highest_pow2_factor (tree exp)
6040{
6041  unsigned HOST_WIDE_INT c0, c1;
6042
6043  switch (TREE_CODE (exp))
6044    {
6045    case INTEGER_CST:
6046      /* We can find the lowest bit that's a one.  If the low
6047	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6048	 We need to handle this case since we can find it in a COND_EXPR,
6049	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6050	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6051	 later ICE.  */
6052      if (TREE_CONSTANT_OVERFLOW (exp))
6053	return BIGGEST_ALIGNMENT;
6054      else
6055	{
6056	  /* Note: tree_low_cst is intentionally not used here,
6057	     we don't care about the upper bits.  */
6058	  c0 = TREE_INT_CST_LOW (exp);
6059	  c0 &= -c0;
6060	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6061	}
6062      break;
6063
6064    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6065      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6066      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6067      return MIN (c0, c1);
6068
6069    case MULT_EXPR:
6070      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6071      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6072      return c0 * c1;
6073
6074    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6075    case CEIL_DIV_EXPR:
6076      if (integer_pow2p (TREE_OPERAND (exp, 1))
6077	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6078	{
6079	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6080	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6081	  return MAX (1, c0 / c1);
6082	}
6083      break;
6084
6085    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6086    case SAVE_EXPR: case WITH_RECORD_EXPR:
6087      return highest_pow2_factor (TREE_OPERAND (exp, 0));
6088
6089    case COMPOUND_EXPR:
6090      return highest_pow2_factor (TREE_OPERAND (exp, 1));
6091
6092    case COND_EXPR:
6093      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6094      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6095      return MIN (c0, c1);
6096
6097    default:
6098      break;
6099    }
6100
6101  return 1;
6102}
6103
6104/* Similar, except that the alignment requirements of TARGET are
6105   taken into account.  Assume it is at least as aligned as its
6106   type, unless it is a COMPONENT_REF in which case the layout of
6107   the structure gives the alignment.  */
6108
6109static unsigned HOST_WIDE_INT
6110highest_pow2_factor_for_target (tree target, tree exp)
6111{
6112  unsigned HOST_WIDE_INT target_align, factor;
6113
6114  factor = highest_pow2_factor (exp);
6115  if (TREE_CODE (target) == COMPONENT_REF)
6116    target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6117  else
6118    target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6119  return MAX (factor, target_align);
6120}
6121
6122/* Return an object on the placeholder list that matches EXP, a
6123   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
6124   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
6125   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
6126   is a location which initially points to a starting location in the
6127   placeholder list (zero means start of the list) and where a pointer into
6128   the placeholder list at which the object is found is placed.  */
6129
6130tree
6131find_placeholder (tree exp, tree *plist)
6132{
6133  tree type = TREE_TYPE (exp);
6134  tree placeholder_expr;
6135
6136  for (placeholder_expr
6137       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6138       placeholder_expr != 0;
6139       placeholder_expr = TREE_CHAIN (placeholder_expr))
6140    {
6141      tree need_type = TYPE_MAIN_VARIANT (type);
6142      tree elt;
6143
6144      /* Find the outermost reference that is of the type we want.  If none,
6145	 see if any object has a type that is a pointer to the type we
6146	 want.  */
6147      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6148	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6149		   || TREE_CODE (elt) == COND_EXPR)
6150		  ? TREE_OPERAND (elt, 1)
6151		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6152		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6153		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6154		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6155		  ? TREE_OPERAND (elt, 0) : 0))
6156	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6157	  {
6158	    if (plist)
6159	      *plist = placeholder_expr;
6160	    return elt;
6161	  }
6162
6163      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6164	   elt
6165	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6166	       || TREE_CODE (elt) == COND_EXPR)
6167	      ? TREE_OPERAND (elt, 1)
6168	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6169		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6170		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6171		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6172	      ? TREE_OPERAND (elt, 0) : 0))
6173	if (POINTER_TYPE_P (TREE_TYPE (elt))
6174	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6175		== need_type))
6176	  {
6177	    if (plist)
6178	      *plist = placeholder_expr;
6179	    return build1 (INDIRECT_REF, need_type, elt);
6180	  }
6181    }
6182
6183  return 0;
6184}
6185
6186/* Subroutine of expand_expr.  Expand the two operands of a binary
6187   expression EXP0 and EXP1 placing the results in OP0 and OP1.
6188   The value may be stored in TARGET if TARGET is nonzero.  The
6189   MODIFIER argument is as documented by expand_expr.  */
6190
6191static void
6192expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6193		 enum expand_modifier modifier)
6194{
6195  if (! safe_from_p (target, exp1, 1))
6196    target = 0;
6197  if (operand_equal_p (exp0, exp1, 0))
6198    {
6199      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6200      *op1 = copy_rtx (*op0);
6201    }
6202  else
6203    {
6204      /* If we need to preserve evaluation order, copy exp0 into its own
6205	 temporary variable so that it can't be clobbered by exp1.  */
6206      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6207	exp0 = save_expr (exp0);
6208      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6209      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6210    }
6211}
6212
6213
6214/* expand_expr: generate code for computing expression EXP.
6215   An rtx for the computed value is returned.  The value is never null.
6216   In the case of a void EXP, const0_rtx is returned.
6217
6218   The value may be stored in TARGET if TARGET is nonzero.
6219   TARGET is just a suggestion; callers must assume that
6220   the rtx returned may not be the same as TARGET.
6221
6222   If TARGET is CONST0_RTX, it means that the value will be ignored.
6223
6224   If TMODE is not VOIDmode, it suggests generating the
6225   result in mode TMODE.  But this is done only when convenient.
6226   Otherwise, TMODE is ignored and the value generated in its natural mode.
6227   TMODE is just a suggestion; callers must assume that
6228   the rtx returned may not have mode TMODE.
6229
6230   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6231   probably will not be used.
6232
6233   If MODIFIER is EXPAND_SUM then when EXP is an addition
6234   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6235   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6236   products as above, or REG or MEM, or constant.
6237   Ordinarily in such cases we would output mul or add instructions
6238   and then return a pseudo reg containing the sum.
6239
6240   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6241   it also marks a label as absolutely required (it can't be dead).
6242   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6243   This is used for outputting expressions used in initializers.
6244
6245   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6246   with a constant address even if that address is not normally legitimate.
6247   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6248
6249   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6250   a call parameter.  Such targets require special care as we haven't yet
6251   marked TARGET so that it's safe from being trashed by libcalls.  We
6252   don't want to use TARGET for anything but the final result;
6253   Intermediate values must go elsewhere.   Additionally, calls to
6254   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6255
6256   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6257   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6258   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
6259   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6260   recursively.  */
6261
6262rtx
6263expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6264		  enum expand_modifier modifier, rtx *alt_rtl)
6265{
6266  rtx op0, op1, temp;
6267  tree type = TREE_TYPE (exp);
6268  int unsignedp = TREE_UNSIGNED (type);
6269  enum machine_mode mode;
6270  enum tree_code code = TREE_CODE (exp);
6271  optab this_optab;
6272  rtx subtarget, original_target;
6273  int ignore;
6274  tree context;
6275
6276  /* Handle ERROR_MARK before anybody tries to access its type.  */
6277  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6278    {
6279      op0 = CONST0_RTX (tmode);
6280      if (op0 != 0)
6281	return op0;
6282      return const0_rtx;
6283    }
6284
6285  mode = TYPE_MODE (type);
6286  /* Use subtarget as the target for operand 0 of a binary operation.  */
6287  subtarget = get_subtarget (target);
6288  original_target = target;
6289  ignore = (target == const0_rtx
6290	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6291		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6292		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6293		&& TREE_CODE (type) == VOID_TYPE));
6294
6295  /* If we are going to ignore this result, we need only do something
6296     if there is a side-effect somewhere in the expression.  If there
6297     is, short-circuit the most common cases here.  Note that we must
6298     not call expand_expr with anything but const0_rtx in case this
6299     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6300
6301  if (ignore)
6302    {
6303      if (! TREE_SIDE_EFFECTS (exp))
6304	return const0_rtx;
6305
6306      /* Ensure we reference a volatile object even if value is ignored, but
6307	 don't do this if all we are doing is taking its address.  */
6308      if (TREE_THIS_VOLATILE (exp)
6309	  && TREE_CODE (exp) != FUNCTION_DECL
6310	  && mode != VOIDmode && mode != BLKmode
6311	  && modifier != EXPAND_CONST_ADDRESS)
6312	{
6313	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6314	  if (GET_CODE (temp) == MEM)
6315	    temp = copy_to_reg (temp);
6316	  return const0_rtx;
6317	}
6318
6319      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6320	  || code == INDIRECT_REF || code == BUFFER_REF)
6321	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6322			    modifier);
6323
6324      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6325	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6326	{
6327	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6328	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6329	  return const0_rtx;
6330	}
6331      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6332	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6333	/* If the second operand has no side effects, just evaluate
6334	   the first.  */
6335	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6336			    modifier);
6337      else if (code == BIT_FIELD_REF)
6338	{
6339	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6340	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6341	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6342	  return const0_rtx;
6343	}
6344
6345      target = 0;
6346    }
6347
6348  /* If will do cse, generate all results into pseudo registers
6349     since 1) that allows cse to find more things
6350     and 2) otherwise cse could produce an insn the machine
6351     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6352     MEM: that's much more likely to be most efficient into the MEM.
6353     Another is a CALL_EXPR which must return in memory.  */
6354
6355  if (! cse_not_expected && mode != BLKmode && target
6356      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6357      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6358      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6359    target = 0;
6360
6361  switch (code)
6362    {
6363    case LABEL_DECL:
6364      {
6365	tree function = decl_function_context (exp);
6366	/* Labels in containing functions, or labels used from initializers,
6367	   must be forced.  */
6368	if (modifier == EXPAND_INITIALIZER
6369	    || (function != current_function_decl
6370		&& function != inline_function_decl
6371		&& function != 0))
6372	  temp = force_label_rtx (exp);
6373	else
6374	  temp = label_rtx (exp);
6375
6376	temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6377	if (function != current_function_decl
6378	    && function != inline_function_decl && function != 0)
6379	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6380	return temp;
6381      }
6382
6383    case PARM_DECL:
6384      if (!DECL_RTL_SET_P (exp))
6385	{
6386	  error ("%Jprior parameter's size depends on '%D'", exp, exp);
6387	  return CONST0_RTX (mode);
6388	}
6389
6390      /* ... fall through ...  */
6391
6392    case VAR_DECL:
6393      /* If a static var's type was incomplete when the decl was written,
6394	 but the type is complete now, lay out the decl now.  */
6395      if (DECL_SIZE (exp) == 0
6396	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6397	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6398	layout_decl (exp, 0);
6399
6400      /* ... fall through ...  */
6401
6402    case FUNCTION_DECL:
6403    case RESULT_DECL:
6404      if (DECL_RTL (exp) == 0)
6405	abort ();
6406
6407      /* Ensure variable marked as used even if it doesn't go through
6408	 a parser.  If it hasn't be used yet, write out an external
6409	 definition.  */
6410      if (! TREE_USED (exp))
6411	{
6412	  assemble_external (exp);
6413	  TREE_USED (exp) = 1;
6414	}
6415
6416      /* Show we haven't gotten RTL for this yet.  */
6417      temp = 0;
6418
6419      /* Handle variables inherited from containing functions.  */
6420      context = decl_function_context (exp);
6421
6422      /* We treat inline_function_decl as an alias for the current function
6423	 because that is the inline function whose vars, types, etc.
6424	 are being merged into the current function.
6425	 See expand_inline_function.  */
6426
6427      if (context != 0 && context != current_function_decl
6428	  && context != inline_function_decl
6429	  /* If var is static, we don't need a static chain to access it.  */
6430	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6431		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6432	{
6433	  rtx addr;
6434
6435	  /* Mark as non-local and addressable.  */
6436	  DECL_NONLOCAL (exp) = 1;
6437	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6438	    abort ();
6439	  (*lang_hooks.mark_addressable) (exp);
6440	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6441	    abort ();
6442	  addr = XEXP (DECL_RTL (exp), 0);
6443	  if (GET_CODE (addr) == MEM)
6444	    addr
6445	      = replace_equiv_address (addr,
6446				       fix_lexical_addr (XEXP (addr, 0), exp));
6447	  else
6448	    addr = fix_lexical_addr (addr, exp);
6449
6450	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6451	}
6452
6453      /* This is the case of an array whose size is to be determined
6454	 from its initializer, while the initializer is still being parsed.
6455	 See expand_decl.  */
6456
6457      else if (GET_CODE (DECL_RTL (exp)) == MEM
6458	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6459	temp = validize_mem (DECL_RTL (exp));
6460
6461      /* If DECL_RTL is memory, we are in the normal case and either
6462	 the address is not valid or it is not a register and -fforce-addr
6463	 is specified, get the address into a register.  */
6464
6465      else if (GET_CODE (DECL_RTL (exp)) == MEM
6466	       && modifier != EXPAND_CONST_ADDRESS
6467	       && modifier != EXPAND_SUM
6468	       && modifier != EXPAND_INITIALIZER
6469	       && (! memory_address_p (DECL_MODE (exp),
6470				       XEXP (DECL_RTL (exp), 0))
6471		   || (flag_force_addr
6472		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6473	{
6474	  if (alt_rtl)
6475	    *alt_rtl = DECL_RTL (exp);
6476	  temp = replace_equiv_address (DECL_RTL (exp),
6477					copy_rtx (XEXP (DECL_RTL (exp), 0)));
6478	}
6479
6480      /* If we got something, return it.  But first, set the alignment
6481	 if the address is a register.  */
6482      if (temp != 0)
6483	{
6484	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6485	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6486
6487	  return temp;
6488	}
6489
6490      /* If the mode of DECL_RTL does not match that of the decl, it
6491	 must be a promoted value.  We return a SUBREG of the wanted mode,
6492	 but mark it so that we know that it was already extended.  */
6493
6494      if (GET_CODE (DECL_RTL (exp)) == REG
6495	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6496	{
6497	  /* Get the signedness used for this variable.  Ensure we get the
6498	     same mode we got when the variable was declared.  */
6499	  if (GET_MODE (DECL_RTL (exp))
6500	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6501			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6502	    abort ();
6503
6504	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6505	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6506	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6507	  return temp;
6508	}
6509
6510      return DECL_RTL (exp);
6511
6512    case INTEGER_CST:
6513      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6514				 TREE_INT_CST_HIGH (exp), mode);
6515
6516      /* ??? If overflow is set, fold will have done an incomplete job,
6517	 which can result in (plus xx (const_int 0)), which can get
6518	 simplified by validate_replace_rtx during virtual register
6519	 instantiation, which can result in unrecognizable insns.
6520	 Avoid this by forcing all overflows into registers.  */
6521      if (TREE_CONSTANT_OVERFLOW (exp)
6522	  && modifier != EXPAND_INITIALIZER)
6523	temp = force_reg (mode, temp);
6524
6525      return temp;
6526
6527    case VECTOR_CST:
6528      return const_vector_from_tree (exp);
6529
6530    case CONST_DECL:
6531      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6532
6533    case REAL_CST:
6534      /* If optimized, generate immediate CONST_DOUBLE
6535	 which will be turned into memory by reload if necessary.
6536
6537	 We used to force a register so that loop.c could see it.  But
6538	 this does not allow gen_* patterns to perform optimizations with
6539	 the constants.  It also produces two insns in cases like "x = 1.0;".
6540	 On most machines, floating-point constants are not permitted in
6541	 many insns, so we'd end up copying it to a register in any case.
6542
6543	 Now, we do the copying in expand_binop, if appropriate.  */
6544      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6545					   TYPE_MODE (TREE_TYPE (exp)));
6546
6547    case COMPLEX_CST:
6548      /* Handle evaluating a complex constant in a CONCAT target.  */
6549      if (original_target && GET_CODE (original_target) == CONCAT)
6550	{
6551	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6552	  rtx rtarg, itarg;
6553
6554	  rtarg = XEXP (original_target, 0);
6555	  itarg = XEXP (original_target, 1);
6556
6557	  /* Move the real and imaginary parts separately.  */
6558	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6559	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6560
6561	  if (op0 != rtarg)
6562	    emit_move_insn (rtarg, op0);
6563	  if (op1 != itarg)
6564	    emit_move_insn (itarg, op1);
6565
6566	  return original_target;
6567	}
6568
6569      /* ... fall through ...  */
6570
6571    case STRING_CST:
6572      temp = output_constant_def (exp, 1);
6573
6574      /* temp contains a constant address.
6575	 On RISC machines where a constant address isn't valid,
6576	 make some insns to get that address into a register.  */
6577      if (modifier != EXPAND_CONST_ADDRESS
6578	  && modifier != EXPAND_INITIALIZER
6579	  && modifier != EXPAND_SUM
6580	  && (! memory_address_p (mode, XEXP (temp, 0))
6581	      || flag_force_addr))
6582	return replace_equiv_address (temp,
6583				      copy_rtx (XEXP (temp, 0)));
6584      return temp;
6585
6586    case EXPR_WITH_FILE_LOCATION:
6587      {
6588	rtx to_return;
6589	struct file_stack fs;
6590
6591	fs.location = input_location;
6592	fs.next = expr_wfl_stack;
6593	input_filename = EXPR_WFL_FILENAME (exp);
6594	input_line = EXPR_WFL_LINENO (exp);
6595	expr_wfl_stack = &fs;
6596	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6597	  emit_line_note (input_location);
6598	/* Possibly avoid switching back and forth here.  */
6599	to_return = expand_expr (EXPR_WFL_NODE (exp),
6600				 (ignore ? const0_rtx : target),
6601				 tmode, modifier);
6602	if (expr_wfl_stack != &fs)
6603	  abort ();
6604	input_location = fs.location;
6605	expr_wfl_stack = fs.next;
6606	return to_return;
6607      }
6608
6609    case SAVE_EXPR:
6610      context = decl_function_context (exp);
6611
6612      /* If this SAVE_EXPR was at global context, assume we are an
6613	 initialization function and move it into our context.  */
6614      if (context == 0)
6615	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6616
6617      /* We treat inline_function_decl as an alias for the current function
6618	 because that is the inline function whose vars, types, etc.
6619	 are being merged into the current function.
6620	 See expand_inline_function.  */
6621      if (context == current_function_decl || context == inline_function_decl)
6622	context = 0;
6623
6624      /* If this is non-local, handle it.  */
6625      if (context)
6626	{
6627	  /* The following call just exists to abort if the context is
6628	     not of a containing function.  */
6629	  find_function_data (context);
6630
6631	  temp = SAVE_EXPR_RTL (exp);
6632	  if (temp && GET_CODE (temp) == REG)
6633	    {
6634	      put_var_into_stack (exp, /*rescan=*/true);
6635	      temp = SAVE_EXPR_RTL (exp);
6636	    }
6637	  if (temp == 0 || GET_CODE (temp) != MEM)
6638	    abort ();
6639	  return
6640	    replace_equiv_address (temp,
6641				   fix_lexical_addr (XEXP (temp, 0), exp));
6642	}
6643      if (SAVE_EXPR_RTL (exp) == 0)
6644	{
6645	  if (mode == VOIDmode)
6646	    temp = const0_rtx;
6647	  else
6648	    temp = assign_temp (build_qualified_type (type,
6649						      (TYPE_QUALS (type)
6650						       | TYPE_QUAL_CONST)),
6651				3, 0, 0);
6652
6653	  SAVE_EXPR_RTL (exp) = temp;
6654	  if (!optimize && GET_CODE (temp) == REG)
6655	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6656						save_expr_regs);
6657
6658	  /* If the mode of TEMP does not match that of the expression, it
6659	     must be a promoted value.  We pass store_expr a SUBREG of the
6660	     wanted mode but mark it so that we know that it was already
6661	     extended.  */
6662
6663	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6664	    {
6665	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6666	      promote_mode (type, mode, &unsignedp, 0);
6667	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6668	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6669	    }
6670
6671	  if (temp == const0_rtx)
6672	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6673	  else
6674	    store_expr (TREE_OPERAND (exp, 0), temp,
6675			modifier == EXPAND_STACK_PARM ? 2 : 0);
6676
6677	  TREE_USED (exp) = 1;
6678	}
6679
6680      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6681	 must be a promoted value.  We return a SUBREG of the wanted mode,
6682	 but mark it so that we know that it was already extended.  */
6683
6684      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6685	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6686	{
6687	  /* Compute the signedness and make the proper SUBREG.  */
6688	  promote_mode (type, mode, &unsignedp, 0);
6689	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6690	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6691	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6692	  return temp;
6693	}
6694
6695      return SAVE_EXPR_RTL (exp);
6696
6697    case UNSAVE_EXPR:
6698      {
6699	rtx temp;
6700	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6701	TREE_OPERAND (exp, 0)
6702	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6703	return temp;
6704      }
6705
6706    case PLACEHOLDER_EXPR:
6707      {
6708	tree old_list = placeholder_list;
6709	tree placeholder_expr = 0;
6710
6711	exp = find_placeholder (exp, &placeholder_expr);
6712	if (exp == 0)
6713	  abort ();
6714
6715	placeholder_list = TREE_CHAIN (placeholder_expr);
6716	temp = expand_expr (exp, original_target, tmode, modifier);
6717	placeholder_list = old_list;
6718	return temp;
6719      }
6720
6721    case WITH_RECORD_EXPR:
6722      /* Put the object on the placeholder list, expand our first operand,
6723	 and pop the list.  */
6724      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6725				    placeholder_list);
6726      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6727			    modifier);
6728      placeholder_list = TREE_CHAIN (placeholder_list);
6729      return target;
6730
6731    case GOTO_EXPR:
6732      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6733	expand_goto (TREE_OPERAND (exp, 0));
6734      else
6735	expand_computed_goto (TREE_OPERAND (exp, 0));
6736      return const0_rtx;
6737
6738    case EXIT_EXPR:
6739      expand_exit_loop_if_false (NULL,
6740				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6741      return const0_rtx;
6742
6743    case LABELED_BLOCK_EXPR:
6744      if (LABELED_BLOCK_BODY (exp))
6745	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6746      /* Should perhaps use expand_label, but this is simpler and safer.  */
6747      do_pending_stack_adjust ();
6748      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6749      return const0_rtx;
6750
6751    case EXIT_BLOCK_EXPR:
6752      if (EXIT_BLOCK_RETURN (exp))
6753	sorry ("returned value in block_exit_expr");
6754      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6755      return const0_rtx;
6756
6757    case LOOP_EXPR:
6758      push_temp_slots ();
6759      expand_start_loop (1);
6760      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6761      expand_end_loop ();
6762      pop_temp_slots ();
6763
6764      return const0_rtx;
6765
6766    case BIND_EXPR:
6767      {
6768	tree vars = TREE_OPERAND (exp, 0);
6769
6770	/* Need to open a binding contour here because
6771	   if there are any cleanups they must be contained here.  */
6772	expand_start_bindings (2);
6773
6774	/* Mark the corresponding BLOCK for output in its proper place.  */
6775	if (TREE_OPERAND (exp, 2) != 0
6776	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6777	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6778
6779	/* If VARS have not yet been expanded, expand them now.  */
6780	while (vars)
6781	  {
6782	    if (!DECL_RTL_SET_P (vars))
6783	      expand_decl (vars);
6784	    expand_decl_init (vars);
6785	    vars = TREE_CHAIN (vars);
6786	  }
6787
6788	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6789
6790	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6791
6792	return temp;
6793      }
6794
6795    case RTL_EXPR:
6796      if (RTL_EXPR_SEQUENCE (exp))
6797	{
6798	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6799	    abort ();
6800	  emit_insn (RTL_EXPR_SEQUENCE (exp));
6801	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6802	}
6803      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6804      free_temps_for_rtl_expr (exp);
6805      if (alt_rtl)
6806	*alt_rtl = RTL_EXPR_ALT_RTL (exp);
6807      return RTL_EXPR_RTL (exp);
6808
6809    case CONSTRUCTOR:
6810      /* If we don't need the result, just ensure we evaluate any
6811	 subexpressions.  */
6812      if (ignore)
6813	{
6814	  tree elt;
6815
6816	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6817	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6818
6819	  return const0_rtx;
6820	}
6821
6822      /* All elts simple constants => refer to a constant in memory.  But
6823	 if this is a non-BLKmode mode, let it store a field at a time
6824	 since that should make a CONST_INT or CONST_DOUBLE when we
6825	 fold.  Likewise, if we have a target we can use, it is best to
6826	 store directly into the target unless the type is large enough
6827	 that memcpy will be used.  If we are making an initializer and
6828	 all operands are constant, put it in memory as well.
6829
6830	FIXME: Avoid trying to fill vector constructors piece-meal.
6831	Output them with output_constant_def below unless we're sure
6832	they're zeros.  This should go away when vector initializers
6833	are treated like VECTOR_CST instead of arrays.
6834      */
6835      else if ((TREE_STATIC (exp)
6836		&& ((mode == BLKmode
6837		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6838		    || TREE_ADDRESSABLE (exp)
6839		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6840			&& (! MOVE_BY_PIECES_P
6841			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6842			     TYPE_ALIGN (type)))
6843			&& ((TREE_CODE (type) == VECTOR_TYPE
6844			     && !is_zeros_p (exp))
6845			    || ! mostly_zeros_p (exp)))))
6846	       || ((modifier == EXPAND_INITIALIZER
6847		    || modifier == EXPAND_CONST_ADDRESS)
6848		   && TREE_CONSTANT (exp)))
6849	{
6850	  rtx constructor = output_constant_def (exp, 1);
6851
6852	  if (modifier != EXPAND_CONST_ADDRESS
6853	      && modifier != EXPAND_INITIALIZER
6854	      && modifier != EXPAND_SUM)
6855	    constructor = validize_mem (constructor);
6856
6857	  return constructor;
6858	}
6859      else
6860	{
6861	  /* Handle calls that pass values in multiple non-contiguous
6862	     locations.  The Irix 6 ABI has examples of this.  */
6863	  if (target == 0 || ! safe_from_p (target, exp, 1)
6864	      || GET_CODE (target) == PARALLEL
6865	      || modifier == EXPAND_STACK_PARM)
6866	    target
6867	      = assign_temp (build_qualified_type (type,
6868						   (TYPE_QUALS (type)
6869						    | (TREE_READONLY (exp)
6870						       * TYPE_QUAL_CONST))),
6871			     0, TREE_ADDRESSABLE (exp), 1);
6872
6873	  store_constructor (exp, target, 0, int_expr_size (exp));
6874	  return target;
6875	}
6876
6877    case INDIRECT_REF:
6878      {
6879	tree exp1 = TREE_OPERAND (exp, 0);
6880	tree index;
6881	tree string = string_constant (exp1, &index);
6882
6883	/* Try to optimize reads from const strings.  */
6884	if (string
6885	    && TREE_CODE (string) == STRING_CST
6886	    && TREE_CODE (index) == INTEGER_CST
6887	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6888	    && GET_MODE_CLASS (mode) == MODE_INT
6889	    && GET_MODE_SIZE (mode) == 1
6890	    && modifier != EXPAND_WRITE)
6891	  return gen_int_mode (TREE_STRING_POINTER (string)
6892			       [TREE_INT_CST_LOW (index)], mode);
6893
6894	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6895	op0 = memory_address (mode, op0);
6896	temp = gen_rtx_MEM (mode, op0);
6897	set_mem_attributes (temp, exp, 0);
6898
6899	/* If we are writing to this object and its type is a record with
6900	   readonly fields, we must mark it as readonly so it will
6901	   conflict with readonly references to those fields.  */
6902	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6903	  RTX_UNCHANGING_P (temp) = 1;
6904
6905	return temp;
6906      }
6907
6908    case ARRAY_REF:
6909      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6910	abort ();
6911
6912      {
6913	tree array = TREE_OPERAND (exp, 0);
6914	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6915	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6916	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6917	HOST_WIDE_INT i;
6918
6919	/* Optimize the special-case of a zero lower bound.
6920
6921	   We convert the low_bound to sizetype to avoid some problems
6922	   with constant folding.  (E.g. suppose the lower bound is 1,
6923	   and its mode is QI.  Without the conversion,  (ARRAY
6924	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6925	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6926
6927	if (! integer_zerop (low_bound))
6928	  index = size_diffop (index, convert (sizetype, low_bound));
6929
6930	/* Fold an expression like: "foo"[2].
6931	   This is not done in fold so it won't happen inside &.
6932	   Don't fold if this is for wide characters since it's too
6933	   difficult to do correctly and this is a very rare case.  */
6934
6935	if (modifier != EXPAND_CONST_ADDRESS
6936	    && modifier != EXPAND_INITIALIZER
6937	    && modifier != EXPAND_MEMORY
6938	    && TREE_CODE (array) == STRING_CST
6939	    && TREE_CODE (index) == INTEGER_CST
6940	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6941	    && GET_MODE_CLASS (mode) == MODE_INT
6942	    && GET_MODE_SIZE (mode) == 1)
6943	  return gen_int_mode (TREE_STRING_POINTER (array)
6944			       [TREE_INT_CST_LOW (index)], mode);
6945
6946	/* If this is a constant index into a constant array,
6947	   just get the value from the array.  Handle both the cases when
6948	   we have an explicit constructor and when our operand is a variable
6949	   that was declared const.  */
6950
6951	if (modifier != EXPAND_CONST_ADDRESS
6952	    && modifier != EXPAND_INITIALIZER
6953	    && modifier != EXPAND_MEMORY
6954	    && TREE_CODE (array) == CONSTRUCTOR
6955	    && ! TREE_SIDE_EFFECTS (array)
6956	    && TREE_CODE (index) == INTEGER_CST
6957	    && 0 > compare_tree_int (index,
6958				     list_length (CONSTRUCTOR_ELTS
6959						  (TREE_OPERAND (exp, 0)))))
6960	  {
6961	    tree elem;
6962
6963	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6964		 i = TREE_INT_CST_LOW (index);
6965		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6966	      ;
6967
6968	    if (elem)
6969	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6970				  modifier);
6971	  }
6972
6973	else if (optimize >= 1
6974		 && modifier != EXPAND_CONST_ADDRESS
6975		 && modifier != EXPAND_INITIALIZER
6976		 && modifier != EXPAND_MEMORY
6977		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6978		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6979		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6980		 && targetm.binds_local_p (array))
6981	  {
6982	    if (TREE_CODE (index) == INTEGER_CST)
6983	      {
6984		tree init = DECL_INITIAL (array);
6985
6986		if (TREE_CODE (init) == CONSTRUCTOR)
6987		  {
6988		    tree elem;
6989
6990		    for (elem = CONSTRUCTOR_ELTS (init);
6991			 (elem
6992			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6993			 elem = TREE_CHAIN (elem))
6994		      ;
6995
6996		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6997		      return expand_expr (fold (TREE_VALUE (elem)), target,
6998					  tmode, modifier);
6999		  }
7000		else if (TREE_CODE (init) == STRING_CST
7001			 && 0 > compare_tree_int (index,
7002						  TREE_STRING_LENGTH (init)))
7003		  {
7004		    tree type = TREE_TYPE (TREE_TYPE (init));
7005		    enum machine_mode mode = TYPE_MODE (type);
7006
7007		    if (GET_MODE_CLASS (mode) == MODE_INT
7008			&& GET_MODE_SIZE (mode) == 1)
7009		      return gen_int_mode (TREE_STRING_POINTER (init)
7010					   [TREE_INT_CST_LOW (index)], mode);
7011		  }
7012	      }
7013	  }
7014      }
7015      goto normal_inner_ref;
7016
7017    case COMPONENT_REF:
7018      /* If the operand is a CONSTRUCTOR, we can just extract the
7019	 appropriate field if it is present.  */
7020      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7021	{
7022	  tree elt;
7023
7024	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7025	       elt = TREE_CHAIN (elt))
7026	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7027		/* We can normally use the value of the field in the
7028		   CONSTRUCTOR.  However, if this is a bitfield in
7029		   an integral mode that we can fit in a HOST_WIDE_INT,
7030		   we must mask only the number of bits in the bitfield,
7031		   since this is done implicitly by the constructor.  If
7032		   the bitfield does not meet either of those conditions,
7033		   we can't do this optimization.  */
7034		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7035		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7036			 == MODE_INT)
7037			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7038			    <= HOST_BITS_PER_WIDE_INT))))
7039	      {
7040		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7041		    && modifier == EXPAND_STACK_PARM)
7042		  target = 0;
7043		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7044		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7045		  {
7046		    HOST_WIDE_INT bitsize
7047		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7048		    enum machine_mode imode
7049		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7050
7051		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7052		      {
7053			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7054			op0 = expand_and (imode, op0, op1, target);
7055		      }
7056		    else
7057		      {
7058			tree count
7059			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7060					 0);
7061
7062			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7063					    target, 0);
7064			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7065					    target, 0);
7066		      }
7067		  }
7068
7069		return op0;
7070	      }
7071	}
7072      goto normal_inner_ref;
7073
7074    case BIT_FIELD_REF:
7075    case ARRAY_RANGE_REF:
7076    normal_inner_ref:
7077      {
7078	enum machine_mode mode1;
7079	HOST_WIDE_INT bitsize, bitpos;
7080	tree offset;
7081	int volatilep = 0;
7082	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7083					&mode1, &unsignedp, &volatilep);
7084	rtx orig_op0;
7085
7086	/* If we got back the original object, something is wrong.  Perhaps
7087	   we are evaluating an expression too early.  In any event, don't
7088	   infinitely recurse.  */
7089	if (tem == exp)
7090	  abort ();
7091
7092	/* If TEM's type is a union of variable size, pass TARGET to the inner
7093	   computation, since it will need a temporary and TARGET is known
7094	   to have to do.  This occurs in unchecked conversion in Ada.  */
7095
7096	orig_op0 = op0
7097	  = expand_expr (tem,
7098			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7099			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7100			      != INTEGER_CST)
7101			  && modifier != EXPAND_STACK_PARM
7102			  ? target : NULL_RTX),
7103			 VOIDmode,
7104			 (modifier == EXPAND_INITIALIZER
7105			  || modifier == EXPAND_CONST_ADDRESS
7106			  || modifier == EXPAND_STACK_PARM)
7107			 ? modifier : EXPAND_NORMAL);
7108
7109	/* If this is a constant, put it into a register if it is a
7110	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
7111	if (CONSTANT_P (op0))
7112	  {
7113	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7114	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7115		&& offset == 0)
7116	      op0 = force_reg (mode, op0);
7117	    else
7118	      op0 = validize_mem (force_const_mem (mode, op0));
7119	  }
7120
7121 	/* Otherwise, if this object not in memory and we either have an
7122 	   offset or a BLKmode result, put it there.  This case can't occur in
7123 	   C, but can in Ada if we have unchecked conversion of an expression
7124 	   from a scalar type to an array or record type or for an
7125 	   ARRAY_RANGE_REF whose type is BLKmode.  */
7126	else if (GET_CODE (op0) != MEM
7127		 && (offset != 0
7128		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7129	  {
7130	    /* If the operand is a SAVE_EXPR, we can deal with this by
7131	       forcing the SAVE_EXPR into memory.  */
7132	    if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7133	      {
7134		put_var_into_stack (TREE_OPERAND (exp, 0),
7135				    /*rescan=*/true);
7136		op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7137	      }
7138	    else
7139	      {
7140		tree nt
7141		  = build_qualified_type (TREE_TYPE (tem),
7142					  (TYPE_QUALS (TREE_TYPE (tem))
7143					   | TYPE_QUAL_CONST));
7144		rtx memloc = assign_temp (nt, 1, 1, 1);
7145
7146		emit_move_insn (memloc, op0);
7147		op0 = memloc;
7148	      }
7149	  }
7150
7151	if (offset != 0)
7152	  {
7153	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7154					  EXPAND_SUM);
7155
7156	    if (GET_CODE (op0) != MEM)
7157	      abort ();
7158
7159#ifdef POINTERS_EXTEND_UNSIGNED
7160	    if (GET_MODE (offset_rtx) != Pmode)
7161	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7162#else
7163	    if (GET_MODE (offset_rtx) != ptr_mode)
7164	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7165#endif
7166
7167	    if (GET_MODE (op0) == BLKmode
7168		/* A constant address in OP0 can have VOIDmode, we must
7169		   not try to call force_reg in that case.  */
7170		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
7171		&& bitsize != 0
7172		&& (bitpos % bitsize) == 0
7173		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7174		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7175	      {
7176		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7177		bitpos = 0;
7178	      }
7179
7180	    op0 = offset_address (op0, offset_rtx,
7181				  highest_pow2_factor (offset));
7182	  }
7183
7184	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7185	   record its alignment as BIGGEST_ALIGNMENT.  */
7186	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7187	    && is_aligning_offset (offset, tem))
7188	  set_mem_align (op0, BIGGEST_ALIGNMENT);
7189
7190	/* Don't forget about volatility even if this is a bitfield.  */
7191	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7192	  {
7193	    if (op0 == orig_op0)
7194	      op0 = copy_rtx (op0);
7195
7196	    MEM_VOLATILE_P (op0) = 1;
7197	  }
7198
7199	/* The following code doesn't handle CONCAT.
7200	   Assume only bitpos == 0 can be used for CONCAT, due to
7201	   one element arrays having the same mode as its element.  */
7202	if (GET_CODE (op0) == CONCAT)
7203	  {
7204	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7205	      abort ();
7206	    return op0;
7207	  }
7208
7209	/* In cases where an aligned union has an unaligned object
7210	   as a field, we might be extracting a BLKmode value from
7211	   an integer-mode (e.g., SImode) object.  Handle this case
7212	   by doing the extract into an object as wide as the field
7213	   (which we know to be the width of a basic mode), then
7214	   storing into memory, and changing the mode to BLKmode.  */
7215	if (mode1 == VOIDmode
7216	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7217	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7218		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7219		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7220		&& modifier != EXPAND_CONST_ADDRESS
7221		&& modifier != EXPAND_INITIALIZER)
7222	    /* If the field isn't aligned enough to fetch as a memref,
7223	       fetch it as a bit field.  */
7224	    || (mode1 != BLKmode
7225		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7226		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7227		      || (GET_CODE (op0) == MEM
7228			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7229			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7230		     && ((modifier == EXPAND_CONST_ADDRESS
7231			  || modifier == EXPAND_INITIALIZER)
7232			 ? STRICT_ALIGNMENT
7233			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7234		    || (bitpos % BITS_PER_UNIT != 0)))
7235	    /* If the type and the field are a constant size and the
7236	       size of the type isn't the same size as the bitfield,
7237	       we must use bitfield operations.  */
7238	    || (bitsize >= 0
7239		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7240		    == INTEGER_CST)
7241		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7242					  bitsize)))
7243	  {
7244	    enum machine_mode ext_mode = mode;
7245
7246	    if (ext_mode == BLKmode
7247		&& ! (target != 0 && GET_CODE (op0) == MEM
7248		      && GET_CODE (target) == MEM
7249		      && bitpos % BITS_PER_UNIT == 0))
7250	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7251
7252	    if (ext_mode == BLKmode)
7253	      {
7254		if (target == 0)
7255		  target = assign_temp (type, 0, 1, 1);
7256
7257		if (bitsize == 0)
7258		  return target;
7259
7260		/* In this case, BITPOS must start at a byte boundary and
7261		   TARGET, if specified, must be a MEM.  */
7262		if (GET_CODE (op0) != MEM
7263		    || (target != 0 && GET_CODE (target) != MEM)
7264		    || bitpos % BITS_PER_UNIT != 0)
7265		  abort ();
7266
7267		emit_block_move (target,
7268				 adjust_address (op0, VOIDmode,
7269						 bitpos / BITS_PER_UNIT),
7270				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7271					  / BITS_PER_UNIT),
7272				 (modifier == EXPAND_STACK_PARM
7273				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7274
7275		return target;
7276	      }
7277
7278	    op0 = validize_mem (op0);
7279
7280	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7281	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7282
7283	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7284				     (modifier == EXPAND_STACK_PARM
7285				      ? NULL_RTX : target),
7286				     ext_mode, ext_mode,
7287				     int_size_in_bytes (TREE_TYPE (tem)));
7288
7289	    /* If the result is a record type and BITSIZE is narrower than
7290	       the mode of OP0, an integral mode, and this is a big endian
7291	       machine, we must put the field into the high-order bits.  */
7292	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7293		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7294		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7295	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7296				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7297					    - bitsize),
7298				  op0, 1);
7299
7300	    if (mode == BLKmode)
7301	      {
7302		rtx new = assign_temp (build_qualified_type
7303				       ((*lang_hooks.types.type_for_mode)
7304					(ext_mode, 0),
7305					TYPE_QUAL_CONST), 0, 1, 1);
7306
7307		emit_move_insn (new, op0);
7308		op0 = copy_rtx (new);
7309		PUT_MODE (op0, BLKmode);
7310		set_mem_attributes (op0, exp, 1);
7311	      }
7312
7313	    return op0;
7314	  }
7315
7316	/* If the result is BLKmode, use that to access the object
7317	   now as well.  */
7318	if (mode == BLKmode)
7319	  mode1 = BLKmode;
7320
7321	/* Get a reference to just this component.  */
7322	if (modifier == EXPAND_CONST_ADDRESS
7323	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7324	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7325	else
7326	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7327
7328	if (op0 == orig_op0)
7329	  op0 = copy_rtx (op0);
7330
7331	set_mem_attributes (op0, exp, 0);
7332	if (GET_CODE (XEXP (op0, 0)) == REG)
7333	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7334
7335	MEM_VOLATILE_P (op0) |= volatilep;
7336	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7337	    || modifier == EXPAND_CONST_ADDRESS
7338	    || modifier == EXPAND_INITIALIZER)
7339	  return op0;
7340	else if (target == 0)
7341	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7342
7343	convert_move (target, op0, unsignedp);
7344	return target;
7345      }
7346
7347    case VTABLE_REF:
7348      {
7349	rtx insn, before = get_last_insn (), vtbl_ref;
7350
7351	/* Evaluate the interior expression.  */
7352	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7353				 tmode, modifier);
7354
7355	/* Get or create an instruction off which to hang a note.  */
7356	if (REG_P (subtarget))
7357	  {
7358	    target = subtarget;
7359	    insn = get_last_insn ();
7360	    if (insn == before)
7361	      abort ();
7362	    if (! INSN_P (insn))
7363	      insn = prev_nonnote_insn (insn);
7364	  }
7365	else
7366	  {
7367	    target = gen_reg_rtx (GET_MODE (subtarget));
7368	    insn = emit_move_insn (target, subtarget);
7369	  }
7370
7371	/* Collect the data for the note.  */
7372	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7373	vtbl_ref = plus_constant (vtbl_ref,
7374				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7375	/* Discard the initial CONST that was added.  */
7376	vtbl_ref = XEXP (vtbl_ref, 0);
7377
7378	REG_NOTES (insn)
7379	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7380
7381	return target;
7382      }
7383
7384      /* Intended for a reference to a buffer of a file-object in Pascal.
7385	 But it's not certain that a special tree code will really be
7386	 necessary for these.  INDIRECT_REF might work for them.  */
7387    case BUFFER_REF:
7388      abort ();
7389
7390    case IN_EXPR:
7391      {
7392	/* Pascal set IN expression.
7393
7394	   Algorithm:
7395	       rlo       = set_low - (set_low%bits_per_word);
7396	       the_word  = set [ (index - rlo)/bits_per_word ];
7397	       bit_index = index % bits_per_word;
7398	       bitmask   = 1 << bit_index;
7399	       return !!(the_word & bitmask);  */
7400
7401	tree set = TREE_OPERAND (exp, 0);
7402	tree index = TREE_OPERAND (exp, 1);
7403	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7404	tree set_type = TREE_TYPE (set);
7405	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7406	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7407	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7408	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7409	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7410	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7411	rtx setaddr = XEXP (setval, 0);
7412	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7413	rtx rlow;
7414	rtx diff, quo, rem, addr, bit, result;
7415
7416	/* If domain is empty, answer is no.  Likewise if index is constant
7417	   and out of bounds.  */
7418	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7419	     && TREE_CODE (set_low_bound) == INTEGER_CST
7420	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7421	     || (TREE_CODE (index) == INTEGER_CST
7422		 && TREE_CODE (set_low_bound) == INTEGER_CST
7423		 && tree_int_cst_lt (index, set_low_bound))
7424	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7425		 && TREE_CODE (index) == INTEGER_CST
7426		 && tree_int_cst_lt (set_high_bound, index))))
7427	  return const0_rtx;
7428
7429	if (target == 0)
7430	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7431
7432	/* If we get here, we have to generate the code for both cases
7433	   (in range and out of range).  */
7434
7435	op0 = gen_label_rtx ();
7436	op1 = gen_label_rtx ();
7437
7438	if (! (GET_CODE (index_val) == CONST_INT
7439	       && GET_CODE (lo_r) == CONST_INT))
7440	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7441				   GET_MODE (index_val), iunsignedp, op1);
7442
7443	if (! (GET_CODE (index_val) == CONST_INT
7444	       && GET_CODE (hi_r) == CONST_INT))
7445	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7446				   GET_MODE (index_val), iunsignedp, op1);
7447
7448	/* Calculate the element number of bit zero in the first word
7449	   of the set.  */
7450	if (GET_CODE (lo_r) == CONST_INT)
7451	  rlow = GEN_INT (INTVAL (lo_r)
7452			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7453	else
7454	  rlow = expand_binop (index_mode, and_optab, lo_r,
7455			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7456			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7457
7458	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7459			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7460
7461	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7462			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7463	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7464			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7465
7466	addr = memory_address (byte_mode,
7467			       expand_binop (index_mode, add_optab, diff,
7468					     setaddr, NULL_RTX, iunsignedp,
7469					     OPTAB_LIB_WIDEN));
7470
7471	/* Extract the bit we want to examine.  */
7472	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7473			    gen_rtx_MEM (byte_mode, addr),
7474			    make_tree (TREE_TYPE (index), rem),
7475			    NULL_RTX, 1);
7476	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7477			       GET_MODE (target) == byte_mode ? target : 0,
7478			       1, OPTAB_LIB_WIDEN);
7479
7480	if (result != target)
7481	  convert_move (target, result, 1);
7482
7483	/* Output the code to handle the out-of-range case.  */
7484	emit_jump (op0);
7485	emit_label (op1);
7486	emit_move_insn (target, const0_rtx);
7487	emit_label (op0);
7488	return target;
7489      }
7490
7491    case WITH_CLEANUP_EXPR:
7492      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7493	{
7494	  WITH_CLEANUP_EXPR_RTL (exp)
7495	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7496	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7497				  CLEANUP_EH_ONLY (exp));
7498
7499	  /* That's it for this cleanup.  */
7500	  TREE_OPERAND (exp, 1) = 0;
7501	}
7502      return WITH_CLEANUP_EXPR_RTL (exp);
7503
7504    case CLEANUP_POINT_EXPR:
7505      {
7506	/* Start a new binding layer that will keep track of all cleanup
7507	   actions to be performed.  */
7508	expand_start_bindings (2);
7509
7510	target_temp_slot_level = temp_slot_level;
7511
7512	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7513	/* If we're going to use this value, load it up now.  */
7514	if (! ignore)
7515	  op0 = force_not_mem (op0);
7516	preserve_temp_slots (op0);
7517	expand_end_bindings (NULL_TREE, 0, 0);
7518      }
7519      return op0;
7520
7521    case CALL_EXPR:
7522      /* Check for a built-in function.  */
7523      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7524	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7525	      == FUNCTION_DECL)
7526	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7527	{
7528	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7529	      == BUILT_IN_FRONTEND)
7530	    return (*lang_hooks.expand_expr) (exp, original_target,
7531					      tmode, modifier,
7532					      alt_rtl);
7533	  else
7534	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7535	}
7536
7537      return expand_call (exp, target, ignore);
7538
7539    case NON_LVALUE_EXPR:
7540    case NOP_EXPR:
7541    case CONVERT_EXPR:
7542    case REFERENCE_EXPR:
7543      if (TREE_OPERAND (exp, 0) == error_mark_node)
7544	return const0_rtx;
7545
7546      if (TREE_CODE (type) == UNION_TYPE)
7547	{
7548	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7549
7550	  /* If both input and output are BLKmode, this conversion isn't doing
7551	     anything except possibly changing memory attribute.  */
7552	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7553	    {
7554	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7555					modifier);
7556
7557	      result = copy_rtx (result);
7558	      set_mem_attributes (result, exp, 0);
7559	      return result;
7560	    }
7561
7562	  if (target == 0)
7563	    {
7564	      if (TYPE_MODE (type) != BLKmode)
7565		target = gen_reg_rtx (TYPE_MODE (type));
7566	      else
7567		target = assign_temp (type, 0, 1, 1);
7568	    }
7569
7570	  if (GET_CODE (target) == MEM)
7571	    /* Store data into beginning of memory target.  */
7572	    store_expr (TREE_OPERAND (exp, 0),
7573			adjust_address (target, TYPE_MODE (valtype), 0),
7574			modifier == EXPAND_STACK_PARM ? 2 : 0);
7575
7576	  else if (GET_CODE (target) == REG)
7577	    /* Store this field into a union of the proper type.  */
7578	    store_field (target,
7579			 MIN ((int_size_in_bytes (TREE_TYPE
7580						  (TREE_OPERAND (exp, 0)))
7581			       * BITS_PER_UNIT),
7582			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7583			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7584			 VOIDmode, 0, type, 0);
7585	  else
7586	    abort ();
7587
7588	  /* Return the entire union.  */
7589	  return target;
7590	}
7591
7592      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7593	{
7594	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7595			     modifier);
7596
7597	  /* If the signedness of the conversion differs and OP0 is
7598	     a promoted SUBREG, clear that indication since we now
7599	     have to do the proper extension.  */
7600	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7601	      && GET_CODE (op0) == SUBREG)
7602	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7603
7604	  return op0;
7605	}
7606
7607      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7608      if (GET_MODE (op0) == mode)
7609	return op0;
7610
7611      /* If OP0 is a constant, just convert it into the proper mode.  */
7612      if (CONSTANT_P (op0))
7613	{
7614	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7615	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7616
7617	  if (modifier == EXPAND_INITIALIZER)
7618	    return simplify_gen_subreg (mode, op0, inner_mode,
7619					subreg_lowpart_offset (mode,
7620							       inner_mode));
7621	  else
7622	    return convert_modes (mode, inner_mode, op0,
7623				  TREE_UNSIGNED (inner_type));
7624	}
7625
7626      if (modifier == EXPAND_INITIALIZER)
7627	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7628
7629      if (target == 0)
7630	return
7631	  convert_to_mode (mode, op0,
7632			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7633      else
7634	convert_move (target, op0,
7635		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7636      return target;
7637
7638    case VIEW_CONVERT_EXPR:
7639      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7640
7641      /* If the input and output modes are both the same, we are done.
7642	 Otherwise, if neither mode is BLKmode and both are integral and within
7643	 a word, we can use gen_lowpart.  If neither is true, make sure the
7644	 operand is in memory and convert the MEM to the new mode.  */
7645      if (TYPE_MODE (type) == GET_MODE (op0))
7646	;
7647      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7648	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7649	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7650	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7651	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7652	op0 = gen_lowpart (TYPE_MODE (type), op0);
7653      else if (GET_CODE (op0) != MEM)
7654	{
7655	  /* If the operand is not a MEM, force it into memory.  Since we
7656	     are going to be be changing the mode of the MEM, don't call
7657	     force_const_mem for constants because we don't allow pool
7658	     constants to change mode.  */
7659	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7660
7661	  if (TREE_ADDRESSABLE (exp))
7662	    abort ();
7663
7664	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7665	    target
7666	      = assign_stack_temp_for_type
7667		(TYPE_MODE (inner_type),
7668		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7669
7670	  emit_move_insn (target, op0);
7671	  op0 = target;
7672	}
7673
7674      /* At this point, OP0 is in the correct mode.  If the output type is such
7675	 that the operand is known to be aligned, indicate that it is.
7676	 Otherwise, we need only be concerned about alignment for non-BLKmode
7677	 results.  */
7678      if (GET_CODE (op0) == MEM)
7679	{
7680	  op0 = copy_rtx (op0);
7681
7682	  if (TYPE_ALIGN_OK (type))
7683	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7684	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7685		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7686	    {
7687	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7688	      HOST_WIDE_INT temp_size
7689		= MAX (int_size_in_bytes (inner_type),
7690		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7691	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7692						    temp_size, 0, type);
7693	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7694
7695	      if (TREE_ADDRESSABLE (exp))
7696		abort ();
7697
7698	      if (GET_MODE (op0) == BLKmode)
7699		emit_block_move (new_with_op0_mode, op0,
7700				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7701				 (modifier == EXPAND_STACK_PARM
7702				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7703	      else
7704		emit_move_insn (new_with_op0_mode, op0);
7705
7706	      op0 = new;
7707	    }
7708
7709	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7710	}
7711
7712      return op0;
7713
7714    case PLUS_EXPR:
7715      this_optab = ! unsignedp && flag_trapv
7716                   && (GET_MODE_CLASS (mode) == MODE_INT)
7717                   ? addv_optab : add_optab;
7718
7719      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7720	 something else, make sure we add the register to the constant and
7721	 then to the other thing.  This case can occur during strength
7722	 reduction and doing it this way will produce better code if the
7723	 frame pointer or argument pointer is eliminated.
7724
7725	 fold-const.c will ensure that the constant is always in the inner
7726	 PLUS_EXPR, so the only case we need to do anything about is if
7727	 sp, ap, or fp is our second argument, in which case we must swap
7728	 the innermost first argument and our second argument.  */
7729
7730      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7731	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7732	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7733	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7734	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7735	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7736	{
7737	  tree t = TREE_OPERAND (exp, 1);
7738
7739	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7740	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7741	}
7742
7743      /* If the result is to be ptr_mode and we are adding an integer to
7744	 something, we might be forming a constant.  So try to use
7745	 plus_constant.  If it produces a sum and we can't accept it,
7746	 use force_operand.  This allows P = &ARR[const] to generate
7747	 efficient code on machines where a SYMBOL_REF is not a valid
7748	 address.
7749
7750	 If this is an EXPAND_SUM call, always return the sum.  */
7751      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7752	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7753	{
7754	  if (modifier == EXPAND_STACK_PARM)
7755	    target = 0;
7756	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7757	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7758	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7759	    {
7760	      rtx constant_part;
7761
7762	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7763				 EXPAND_SUM);
7764	      /* Use immed_double_const to ensure that the constant is
7765		 truncated according to the mode of OP1, then sign extended
7766		 to a HOST_WIDE_INT.  Using the constant directly can result
7767		 in non-canonical RTL in a 64x32 cross compile.  */
7768	      constant_part
7769		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7770				      (HOST_WIDE_INT) 0,
7771				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7772	      op1 = plus_constant (op1, INTVAL (constant_part));
7773	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7774		op1 = force_operand (op1, target);
7775	      return op1;
7776	    }
7777
7778	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7779		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7780		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7781	    {
7782	      rtx constant_part;
7783
7784	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7785				 (modifier == EXPAND_INITIALIZER
7786				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7787	      if (! CONSTANT_P (op0))
7788		{
7789		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7790				     VOIDmode, modifier);
7791		  /* Return a PLUS if modifier says it's OK.  */
7792		  if (modifier == EXPAND_SUM
7793		      || modifier == EXPAND_INITIALIZER)
7794		    return simplify_gen_binary (PLUS, mode, op0, op1);
7795		  goto binop2;
7796		}
7797	      /* Use immed_double_const to ensure that the constant is
7798		 truncated according to the mode of OP1, then sign extended
7799		 to a HOST_WIDE_INT.  Using the constant directly can result
7800		 in non-canonical RTL in a 64x32 cross compile.  */
7801	      constant_part
7802		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7803				      (HOST_WIDE_INT) 0,
7804				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7805	      op0 = plus_constant (op0, INTVAL (constant_part));
7806	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7807		op0 = force_operand (op0, target);
7808	      return op0;
7809	    }
7810	}
7811
7812      /* No sense saving up arithmetic to be done
7813	 if it's all in the wrong mode to form part of an address.
7814	 And force_operand won't know whether to sign-extend or
7815	 zero-extend.  */
7816      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7817	  || mode != ptr_mode)
7818	{
7819	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7820			   subtarget, &op0, &op1, 0);
7821	  if (op0 == const0_rtx)
7822	    return op1;
7823	  if (op1 == const0_rtx)
7824	    return op0;
7825	  goto binop2;
7826	}
7827
7828      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7829		       subtarget, &op0, &op1, modifier);
7830      return simplify_gen_binary (PLUS, mode, op0, op1);
7831
7832    case MINUS_EXPR:
7833      /* For initializers, we are allowed to return a MINUS of two
7834	 symbolic constants.  Here we handle all cases when both operands
7835	 are constant.  */
7836      /* Handle difference of two symbolic constants,
7837	 for the sake of an initializer.  */
7838      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7839	  && really_constant_p (TREE_OPERAND (exp, 0))
7840	  && really_constant_p (TREE_OPERAND (exp, 1)))
7841	{
7842	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7843			   NULL_RTX, &op0, &op1, modifier);
7844
7845	  /* If the last operand is a CONST_INT, use plus_constant of
7846	     the negated constant.  Else make the MINUS.  */
7847	  if (GET_CODE (op1) == CONST_INT)
7848	    return plus_constant (op0, - INTVAL (op1));
7849	  else
7850	    return gen_rtx_MINUS (mode, op0, op1);
7851	}
7852
7853      this_optab = ! unsignedp && flag_trapv
7854                   && (GET_MODE_CLASS(mode) == MODE_INT)
7855                   ? subv_optab : sub_optab;
7856
7857      /* No sense saving up arithmetic to be done
7858	 if it's all in the wrong mode to form part of an address.
7859	 And force_operand won't know whether to sign-extend or
7860	 zero-extend.  */
7861      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7862	  || mode != ptr_mode)
7863	goto binop;
7864
7865      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7866		       subtarget, &op0, &op1, modifier);
7867
7868      /* Convert A - const to A + (-const).  */
7869      if (GET_CODE (op1) == CONST_INT)
7870	{
7871	  op1 = negate_rtx (mode, op1);
7872	  return simplify_gen_binary (PLUS, mode, op0, op1);
7873	}
7874
7875      goto binop2;
7876
7877    case MULT_EXPR:
7878      /* If first operand is constant, swap them.
7879	 Thus the following special case checks need only
7880	 check the second operand.  */
7881      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7882	{
7883	  tree t1 = TREE_OPERAND (exp, 0);
7884	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7885	  TREE_OPERAND (exp, 1) = t1;
7886	}
7887
7888      /* Attempt to return something suitable for generating an
7889	 indexed address, for machines that support that.  */
7890
7891      if (modifier == EXPAND_SUM && mode == ptr_mode
7892	  && host_integerp (TREE_OPERAND (exp, 1), 0))
7893	{
7894	  tree exp1 = TREE_OPERAND (exp, 1);
7895
7896	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7897			     EXPAND_SUM);
7898
7899	  if (GET_CODE (op0) != REG)
7900	    op0 = force_operand (op0, NULL_RTX);
7901	  if (GET_CODE (op0) != REG)
7902	    op0 = copy_to_mode_reg (mode, op0);
7903
7904	  return gen_rtx_MULT (mode, op0,
7905			       gen_int_mode (tree_low_cst (exp1, 0),
7906					     TYPE_MODE (TREE_TYPE (exp1))));
7907	}
7908
7909      if (modifier == EXPAND_STACK_PARM)
7910	target = 0;
7911
7912      /* Check for multiplying things that have been extended
7913	 from a narrower type.  If this machine supports multiplying
7914	 in that narrower type with a result in the desired type,
7915	 do it that way, and avoid the explicit type-conversion.  */
7916      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7917	  && TREE_CODE (type) == INTEGER_TYPE
7918	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7919	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7920	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7921	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7922				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7923	       /* Don't use a widening multiply if a shift will do.  */
7924	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7925		    > HOST_BITS_PER_WIDE_INT)
7926		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7927	      ||
7928	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7929	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7930		   ==
7931		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7932	       /* If both operands are extended, they must either both
7933		  be zero-extended or both be sign-extended.  */
7934	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7935		   ==
7936		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7937	{
7938	  enum machine_mode innermode
7939	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7940	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7941			? smul_widen_optab : umul_widen_optab);
7942	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7943			? umul_widen_optab : smul_widen_optab);
7944	  if (mode == GET_MODE_WIDER_MODE (innermode))
7945	    {
7946	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7947		{
7948		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7949		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7950				     TREE_OPERAND (exp, 1),
7951				     NULL_RTX, &op0, &op1, 0);
7952		  else
7953		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7954				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7955				     NULL_RTX, &op0, &op1, 0);
7956		  goto binop2;
7957		}
7958	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7959		       && innermode == word_mode)
7960		{
7961		  rtx htem;
7962		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7963				     NULL_RTX, VOIDmode, 0);
7964		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7965		    op1 = convert_modes (innermode, mode,
7966					 expand_expr (TREE_OPERAND (exp, 1),
7967						      NULL_RTX, VOIDmode, 0),
7968					 unsignedp);
7969		  else
7970		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7971				       NULL_RTX, VOIDmode, 0);
7972		  temp = expand_binop (mode, other_optab, op0, op1, target,
7973				       unsignedp, OPTAB_LIB_WIDEN);
7974		  htem = expand_mult_highpart_adjust (innermode,
7975						      gen_highpart (innermode, temp),
7976						      op0, op1,
7977						      gen_highpart (innermode, temp),
7978						      unsignedp);
7979		  emit_move_insn (gen_highpart (innermode, temp), htem);
7980		  return temp;
7981		}
7982	    }
7983	}
7984      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7985		       subtarget, &op0, &op1, 0);
7986      return expand_mult (mode, op0, op1, target, unsignedp);
7987
7988    case TRUNC_DIV_EXPR:
7989    case FLOOR_DIV_EXPR:
7990    case CEIL_DIV_EXPR:
7991    case ROUND_DIV_EXPR:
7992    case EXACT_DIV_EXPR:
7993      if (modifier == EXPAND_STACK_PARM)
7994	target = 0;
7995      /* Possible optimization: compute the dividend with EXPAND_SUM
7996	 then if the divisor is constant can optimize the case
7997	 where some terms of the dividend have coeffs divisible by it.  */
7998      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7999		       subtarget, &op0, &op1, 0);
8000      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8001
8002    case RDIV_EXPR:
8003      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
8004         expensive divide.  If not, combine will rebuild the original
8005         computation.  */
8006      if (flag_unsafe_math_optimizations && optimize && !optimize_size
8007	  && TREE_CODE (type) == REAL_TYPE
8008	  && !real_onep (TREE_OPERAND (exp, 0)))
8009        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8010				   build (RDIV_EXPR, type,
8011					  build_real (type, dconst1),
8012					  TREE_OPERAND (exp, 1))),
8013			    target, tmode, modifier);
8014      this_optab = sdiv_optab;
8015      goto binop;
8016
8017    case TRUNC_MOD_EXPR:
8018    case FLOOR_MOD_EXPR:
8019    case CEIL_MOD_EXPR:
8020    case ROUND_MOD_EXPR:
8021      if (modifier == EXPAND_STACK_PARM)
8022	target = 0;
8023      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8024		       subtarget, &op0, &op1, 0);
8025      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8026
8027    case FIX_ROUND_EXPR:
8028    case FIX_FLOOR_EXPR:
8029    case FIX_CEIL_EXPR:
8030      abort ();			/* Not used for C.  */
8031
8032    case FIX_TRUNC_EXPR:
8033      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8034      if (target == 0 || modifier == EXPAND_STACK_PARM)
8035	target = gen_reg_rtx (mode);
8036      expand_fix (target, op0, unsignedp);
8037      return target;
8038
8039    case FLOAT_EXPR:
8040      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8041      if (target == 0 || modifier == EXPAND_STACK_PARM)
8042	target = gen_reg_rtx (mode);
8043      /* expand_float can't figure out what to do if FROM has VOIDmode.
8044	 So give it the correct mode.  With -O, cse will optimize this.  */
8045      if (GET_MODE (op0) == VOIDmode)
8046	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8047				op0);
8048      expand_float (target, op0,
8049		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8050      return target;
8051
8052    case NEGATE_EXPR:
8053      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8054      if (modifier == EXPAND_STACK_PARM)
8055	target = 0;
8056      temp = expand_unop (mode,
8057			  ! unsignedp && flag_trapv
8058			  && (GET_MODE_CLASS(mode) == MODE_INT)
8059			  ? negv_optab : neg_optab, op0, target, 0);
8060      if (temp == 0)
8061	abort ();
8062      return temp;
8063
8064    case ABS_EXPR:
8065      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8066      if (modifier == EXPAND_STACK_PARM)
8067	target = 0;
8068
8069      /* ABS_EXPR is not valid for complex arguments.  */
8070      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8071	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8072	abort ();
8073
8074      /* Unsigned abs is simply the operand.  Testing here means we don't
8075	 risk generating incorrect code below.  */
8076      if (TREE_UNSIGNED (type))
8077	return op0;
8078
8079      return expand_abs (mode, op0, target, unsignedp,
8080			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8081
8082    case MAX_EXPR:
8083    case MIN_EXPR:
8084      target = original_target;
8085      if (target == 0
8086	  || modifier == EXPAND_STACK_PARM
8087	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8088	  || GET_MODE (target) != mode
8089	  || (GET_CODE (target) == REG
8090	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8091	target = gen_reg_rtx (mode);
8092      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8093		       target, &op0, &op1, 0);
8094
8095      /* First try to do it with a special MIN or MAX instruction.
8096	 If that does not win, use a conditional jump to select the proper
8097	 value.  */
8098      this_optab = (unsignedp
8099		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
8100		    : (code == MIN_EXPR ? smin_optab : smax_optab));
8101
8102      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8103			   OPTAB_WIDEN);
8104      if (temp != 0)
8105	return temp;
8106
8107      /* At this point, a MEM target is no longer useful; we will get better
8108	 code without it.  */
8109
8110      if (! REG_P (target))
8111	target = gen_reg_rtx (mode);
8112
8113      /* If op1 was placed in target, swap op0 and op1.  */
8114      if (target != op0 && target == op1)
8115	{
8116	  rtx tem = op0;
8117	  op0 = op1;
8118	  op1 = tem;
8119	}
8120
8121      /* We generate better code and avoid problems with op1 mentioning
8122	 target by forcing op1 into a pseudo if it isn't a constant.  */
8123      if (! CONSTANT_P (op1))
8124	op1 = force_reg (mode, op1);
8125
8126      if (target != op0)
8127	emit_move_insn (target, op0);
8128
8129      op0 = gen_label_rtx ();
8130
8131      /* If this mode is an integer too wide to compare properly,
8132	 compare word by word.  Rely on cse to optimize constant cases.  */
8133      if (GET_MODE_CLASS (mode) == MODE_INT
8134	  && ! can_compare_p (GE, mode, ccp_jump))
8135	{
8136	  if (code == MAX_EXPR)
8137	    do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8138					  NULL_RTX, op0);
8139	  else
8140	    do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8141					  NULL_RTX, op0);
8142	}
8143      else
8144	{
8145	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8146				   unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8147	}
8148      emit_move_insn (target, op1);
8149      emit_label (op0);
8150      return target;
8151
8152    case BIT_NOT_EXPR:
8153      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8154      if (modifier == EXPAND_STACK_PARM)
8155	target = 0;
8156      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8157      if (temp == 0)
8158	abort ();
8159      return temp;
8160
8161      /* ??? Can optimize bitwise operations with one arg constant.
8162	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8163	 and (a bitwise1 b) bitwise2 b (etc)
8164	 but that is probably not worth while.  */
8165
8166      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8167	 boolean values when we want in all cases to compute both of them.  In
8168	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8169	 as actual zero-or-1 values and then bitwise anding.  In cases where
8170	 there cannot be any side effects, better code would be made by
8171	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8172	 how to recognize those cases.  */
8173
8174    case TRUTH_AND_EXPR:
8175    case BIT_AND_EXPR:
8176      this_optab = and_optab;
8177      goto binop;
8178
8179    case TRUTH_OR_EXPR:
8180    case BIT_IOR_EXPR:
8181      this_optab = ior_optab;
8182      goto binop;
8183
8184    case TRUTH_XOR_EXPR:
8185    case BIT_XOR_EXPR:
8186      this_optab = xor_optab;
8187      goto binop;
8188
8189    case LSHIFT_EXPR:
8190    case RSHIFT_EXPR:
8191    case LROTATE_EXPR:
8192    case RROTATE_EXPR:
8193      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8194	subtarget = 0;
8195      if (modifier == EXPAND_STACK_PARM)
8196	target = 0;
8197      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8198      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8199			   unsignedp);
8200
8201      /* Could determine the answer when only additive constants differ.  Also,
8202	 the addition of one can be handled by changing the condition.  */
8203    case LT_EXPR:
8204    case LE_EXPR:
8205    case GT_EXPR:
8206    case GE_EXPR:
8207    case EQ_EXPR:
8208    case NE_EXPR:
8209    case UNORDERED_EXPR:
8210    case ORDERED_EXPR:
8211    case UNLT_EXPR:
8212    case UNLE_EXPR:
8213    case UNGT_EXPR:
8214    case UNGE_EXPR:
8215    case UNEQ_EXPR:
8216      temp = do_store_flag (exp,
8217			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8218			    tmode != VOIDmode ? tmode : mode, 0);
8219      if (temp != 0)
8220	return temp;
8221
8222      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8223      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8224	  && original_target
8225	  && GET_CODE (original_target) == REG
8226	  && (GET_MODE (original_target)
8227	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8228	{
8229	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8230			      VOIDmode, 0);
8231
8232	  /* If temp is constant, we can just compute the result.  */
8233	  if (GET_CODE (temp) == CONST_INT)
8234	    {
8235	      if (INTVAL (temp) != 0)
8236	        emit_move_insn (target, const1_rtx);
8237	      else
8238	        emit_move_insn (target, const0_rtx);
8239
8240	      return target;
8241	    }
8242
8243	  if (temp != original_target)
8244	    {
8245	      enum machine_mode mode1 = GET_MODE (temp);
8246	      if (mode1 == VOIDmode)
8247		mode1 = tmode != VOIDmode ? tmode : mode;
8248
8249	      temp = copy_to_mode_reg (mode1, temp);
8250	    }
8251
8252	  op1 = gen_label_rtx ();
8253	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8254				   GET_MODE (temp), unsignedp, op1);
8255	  emit_move_insn (temp, const1_rtx);
8256	  emit_label (op1);
8257	  return temp;
8258	}
8259
8260      /* If no set-flag instruction, must generate a conditional
8261	 store into a temporary variable.  Drop through
8262	 and handle this like && and ||.  */
8263
8264    case TRUTH_ANDIF_EXPR:
8265    case TRUTH_ORIF_EXPR:
8266      if (! ignore
8267	  && (target == 0
8268	      || modifier == EXPAND_STACK_PARM
8269	      || ! safe_from_p (target, exp, 1)
8270	      /* Make sure we don't have a hard reg (such as function's return
8271		 value) live across basic blocks, if not optimizing.  */
8272	      || (!optimize && GET_CODE (target) == REG
8273		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8274	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8275
8276      if (target)
8277	emit_clr_insn (target);
8278
8279      op1 = gen_label_rtx ();
8280      jumpifnot (exp, op1);
8281
8282      if (target)
8283	emit_0_to_1_insn (target);
8284
8285      emit_label (op1);
8286      return ignore ? const0_rtx : target;
8287
8288    case TRUTH_NOT_EXPR:
8289      if (modifier == EXPAND_STACK_PARM)
8290	target = 0;
8291      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8292      /* The parser is careful to generate TRUTH_NOT_EXPR
8293	 only with operands that are always zero or one.  */
8294      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8295			   target, 1, OPTAB_LIB_WIDEN);
8296      if (temp == 0)
8297	abort ();
8298      return temp;
8299
8300    case COMPOUND_EXPR:
8301      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8302      emit_queue ();
8303      return expand_expr_real (TREE_OPERAND (exp, 1),
8304			       (ignore ? const0_rtx : target),
8305			       VOIDmode, modifier, alt_rtl);
8306
8307    case COND_EXPR:
8308      /* If we would have a "singleton" (see below) were it not for a
8309	 conversion in each arm, bring that conversion back out.  */
8310      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8311	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8312	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8313	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8314	{
8315	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8316	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8317
8318	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8319	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8320	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8321		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8322	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8323		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8324	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8325		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8326	    return expand_expr (build1 (NOP_EXPR, type,
8327					build (COND_EXPR, TREE_TYPE (iftrue),
8328					       TREE_OPERAND (exp, 0),
8329					       iftrue, iffalse)),
8330				target, tmode, modifier);
8331	}
8332
8333      {
8334	/* Note that COND_EXPRs whose type is a structure or union
8335	   are required to be constructed to contain assignments of
8336	   a temporary variable, so that we can evaluate them here
8337	   for side effect only.  If type is void, we must do likewise.  */
8338
8339	/* If an arm of the branch requires a cleanup,
8340	   only that cleanup is performed.  */
8341
8342	tree singleton = 0;
8343	tree binary_op = 0, unary_op = 0;
8344
8345	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8346	   convert it to our mode, if necessary.  */
8347	if (integer_onep (TREE_OPERAND (exp, 1))
8348	    && integer_zerop (TREE_OPERAND (exp, 2))
8349	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8350	  {
8351	    if (ignore)
8352	      {
8353		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8354			     modifier);
8355		return const0_rtx;
8356	      }
8357
8358	    if (modifier == EXPAND_STACK_PARM)
8359	      target = 0;
8360	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8361	    if (GET_MODE (op0) == mode)
8362	      return op0;
8363
8364	    if (target == 0)
8365	      target = gen_reg_rtx (mode);
8366	    convert_move (target, op0, unsignedp);
8367	    return target;
8368	  }
8369
8370	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8371	   output and conditionally add B.  Similarly for unary operations.
8372	   Don't do this if X has side-effects because those side effects
8373	   might affect A or B and the "?" operation is a sequence point in
8374	   ANSI.  (operand_equal_p tests for side effects.)  */
8375
8376	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8377	    && operand_equal_p (TREE_OPERAND (exp, 2),
8378				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8379	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8380	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8381		 && operand_equal_p (TREE_OPERAND (exp, 1),
8382				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8383	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8384	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8385		 && operand_equal_p (TREE_OPERAND (exp, 2),
8386				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8387	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8388	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8389		 && operand_equal_p (TREE_OPERAND (exp, 1),
8390				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8391	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8392
8393	/* If we are not to produce a result, we have no target.  Otherwise,
8394	   if a target was specified use it; it will not be used as an
8395	   intermediate target unless it is safe.  If no target, use a
8396	   temporary.  */
8397
8398	if (ignore)
8399	  temp = 0;
8400	else if (modifier == EXPAND_STACK_PARM)
8401	  temp = assign_temp (type, 0, 0, 1);
8402	else if (original_target
8403		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8404		     || (singleton && GET_CODE (original_target) == REG
8405			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8406			 && original_target == var_rtx (singleton)))
8407		 && GET_MODE (original_target) == mode
8408#ifdef HAVE_conditional_move
8409		 && (! can_conditionally_move_p (mode)
8410		     || GET_CODE (original_target) == REG
8411		     || TREE_ADDRESSABLE (type))
8412#endif
8413		 && (GET_CODE (original_target) != MEM
8414		     || TREE_ADDRESSABLE (type)))
8415	  temp = original_target;
8416	else if (TREE_ADDRESSABLE (type))
8417	  abort ();
8418	else
8419	  temp = assign_temp (type, 0, 0, 1);
8420
8421	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8422	   do the test of X as a store-flag operation, do this as
8423	   A + ((X != 0) << log C).  Similarly for other simple binary
8424	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
8425	if (temp && singleton && binary_op
8426	    && (TREE_CODE (binary_op) == PLUS_EXPR
8427		|| TREE_CODE (binary_op) == MINUS_EXPR
8428		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8429		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8430	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8431		: integer_onep (TREE_OPERAND (binary_op, 1)))
8432	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8433	  {
8434	    rtx result;
8435	    tree cond;
8436	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8437			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8438			       ? addv_optab : add_optab)
8439			    : TREE_CODE (binary_op) == MINUS_EXPR
8440			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8441			       ? subv_optab : sub_optab)
8442			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8443			    : xor_optab);
8444
8445	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
8446	    if (singleton == TREE_OPERAND (exp, 1))
8447	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8448	    else
8449	      cond = TREE_OPERAND (exp, 0);
8450
8451	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8452					   ? temp : NULL_RTX),
8453				    mode, BRANCH_COST <= 1);
8454
8455	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8456	      result = expand_shift (LSHIFT_EXPR, mode, result,
8457				     build_int_2 (tree_log2
8458						  (TREE_OPERAND
8459						   (binary_op, 1)),
8460						  0),
8461				     (safe_from_p (temp, singleton, 1)
8462				      ? temp : NULL_RTX), 0);
8463
8464	    if (result)
8465	      {
8466		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8467		return expand_binop (mode, boptab, op1, result, temp,
8468				     unsignedp, OPTAB_LIB_WIDEN);
8469	      }
8470	  }
8471
8472	do_pending_stack_adjust ();
8473	NO_DEFER_POP;
8474	op0 = gen_label_rtx ();
8475
8476	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8477	  {
8478	    if (temp != 0)
8479	      {
8480		/* If the target conflicts with the other operand of the
8481		   binary op, we can't use it.  Also, we can't use the target
8482		   if it is a hard register, because evaluating the condition
8483		   might clobber it.  */
8484		if ((binary_op
8485		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8486		    || (GET_CODE (temp) == REG
8487			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8488		  temp = gen_reg_rtx (mode);
8489		store_expr (singleton, temp,
8490			    modifier == EXPAND_STACK_PARM ? 2 : 0);
8491	      }
8492	    else
8493	      expand_expr (singleton,
8494			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8495	    if (singleton == TREE_OPERAND (exp, 1))
8496	      jumpif (TREE_OPERAND (exp, 0), op0);
8497	    else
8498	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8499
8500	    start_cleanup_deferral ();
8501	    if (binary_op && temp == 0)
8502	      /* Just touch the other operand.  */
8503	      expand_expr (TREE_OPERAND (binary_op, 1),
8504			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8505	    else if (binary_op)
8506	      store_expr (build (TREE_CODE (binary_op), type,
8507				 make_tree (type, temp),
8508				 TREE_OPERAND (binary_op, 1)),
8509			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8510	    else
8511	      store_expr (build1 (TREE_CODE (unary_op), type,
8512				  make_tree (type, temp)),
8513			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8514	    op1 = op0;
8515	  }
8516	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8517	   comparison operator.  If we have one of these cases, set the
8518	   output to A, branch on A (cse will merge these two references),
8519	   then set the output to FOO.  */
8520	else if (temp
8521		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8522		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8523		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8524				     TREE_OPERAND (exp, 1), 0)
8525		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8526		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8527		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8528	  {
8529	    if (GET_CODE (temp) == REG
8530		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8531	      temp = gen_reg_rtx (mode);
8532	    store_expr (TREE_OPERAND (exp, 1), temp,
8533			modifier == EXPAND_STACK_PARM ? 2 : 0);
8534	    jumpif (TREE_OPERAND (exp, 0), op0);
8535
8536	    start_cleanup_deferral ();
8537	    if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8538	      store_expr (TREE_OPERAND (exp, 2), temp,
8539			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8540	    else
8541	      expand_expr (TREE_OPERAND (exp, 2),
8542			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8543	    op1 = op0;
8544	  }
8545	else if (temp
8546		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8547		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8548		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8549				     TREE_OPERAND (exp, 2), 0)
8550		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8551		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8552		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8553	  {
8554	    if (GET_CODE (temp) == REG
8555		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8556	      temp = gen_reg_rtx (mode);
8557	    store_expr (TREE_OPERAND (exp, 2), temp,
8558			modifier == EXPAND_STACK_PARM ? 2 : 0);
8559	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8560
8561	    start_cleanup_deferral ();
8562	    if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8563	      store_expr (TREE_OPERAND (exp, 1), temp,
8564			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8565	    else
8566	      expand_expr (TREE_OPERAND (exp, 1),
8567			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8568	    op1 = op0;
8569	  }
8570	else
8571	  {
8572	    op1 = gen_label_rtx ();
8573	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8574
8575	    start_cleanup_deferral ();
8576
8577	    /* One branch of the cond can be void, if it never returns. For
8578	       example A ? throw : E  */
8579	    if (temp != 0
8580		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8581	      store_expr (TREE_OPERAND (exp, 1), temp,
8582			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8583	    else
8584	      expand_expr (TREE_OPERAND (exp, 1),
8585			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8586	    end_cleanup_deferral ();
8587	    emit_queue ();
8588	    emit_jump_insn (gen_jump (op1));
8589	    emit_barrier ();
8590	    emit_label (op0);
8591	    start_cleanup_deferral ();
8592	    if (temp != 0
8593		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8594	      store_expr (TREE_OPERAND (exp, 2), temp,
8595			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8596	    else
8597	      expand_expr (TREE_OPERAND (exp, 2),
8598			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8599	  }
8600
8601	end_cleanup_deferral ();
8602
8603	emit_queue ();
8604	emit_label (op1);
8605	OK_DEFER_POP;
8606
8607	return temp;
8608      }
8609
8610    case TARGET_EXPR:
8611      {
8612	/* Something needs to be initialized, but we didn't know
8613	   where that thing was when building the tree.  For example,
8614	   it could be the return value of a function, or a parameter
8615	   to a function which lays down in the stack, or a temporary
8616	   variable which must be passed by reference.
8617
8618	   We guarantee that the expression will either be constructed
8619	   or copied into our original target.  */
8620
8621	tree slot = TREE_OPERAND (exp, 0);
8622	tree cleanups = NULL_TREE;
8623	tree exp1;
8624
8625	if (TREE_CODE (slot) != VAR_DECL)
8626	  abort ();
8627
8628	if (! ignore)
8629	  target = original_target;
8630
8631	/* Set this here so that if we get a target that refers to a
8632	   register variable that's already been used, put_reg_into_stack
8633	   knows that it should fix up those uses.  */
8634	TREE_USED (slot) = 1;
8635
8636	if (target == 0)
8637	  {
8638	    if (DECL_RTL_SET_P (slot))
8639	      {
8640		target = DECL_RTL (slot);
8641		/* If we have already expanded the slot, so don't do
8642		   it again.  (mrs)  */
8643		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8644		  return target;
8645	      }
8646	    else
8647	      {
8648		target = assign_temp (type, 2, 0, 1);
8649		SET_DECL_RTL (slot, target);
8650		if (TREE_ADDRESSABLE (slot))
8651		  put_var_into_stack (slot, /*rescan=*/false);
8652
8653		/* Since SLOT is not known to the called function
8654		   to belong to its stack frame, we must build an explicit
8655		   cleanup.  This case occurs when we must build up a reference
8656		   to pass the reference as an argument.  In this case,
8657		   it is very likely that such a reference need not be
8658		   built here.  */
8659
8660		if (TREE_OPERAND (exp, 2) == 0)
8661		  TREE_OPERAND (exp, 2)
8662		    = (*lang_hooks.maybe_build_cleanup) (slot);
8663		cleanups = TREE_OPERAND (exp, 2);
8664	      }
8665	  }
8666	else
8667	  {
8668	    /* This case does occur, when expanding a parameter which
8669	       needs to be constructed on the stack.  The target
8670	       is the actual stack address that we want to initialize.
8671	       The function we call will perform the cleanup in this case.  */
8672
8673	    /* If we have already assigned it space, use that space,
8674	       not target that we were passed in, as our target
8675	       parameter is only a hint.  */
8676	    if (DECL_RTL_SET_P (slot))
8677	      {
8678		target = DECL_RTL (slot);
8679		/* If we have already expanded the slot, so don't do
8680                   it again.  (mrs)  */
8681		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8682		  return target;
8683	      }
8684	    else
8685	      {
8686		SET_DECL_RTL (slot, target);
8687		/* If we must have an addressable slot, then make sure that
8688		   the RTL that we just stored in slot is OK.  */
8689		if (TREE_ADDRESSABLE (slot))
8690		  put_var_into_stack (slot, /*rescan=*/true);
8691	      }
8692	  }
8693
8694	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8695	/* Mark it as expanded.  */
8696	TREE_OPERAND (exp, 1) = NULL_TREE;
8697
8698	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8699
8700	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8701
8702	return target;
8703      }
8704
8705    case INIT_EXPR:
8706      {
8707	tree lhs = TREE_OPERAND (exp, 0);
8708	tree rhs = TREE_OPERAND (exp, 1);
8709
8710	temp = expand_assignment (lhs, rhs, ! ignore);
8711	return temp;
8712      }
8713
8714    case MODIFY_EXPR:
8715      {
8716	/* If lhs is complex, expand calls in rhs before computing it.
8717	   That's so we don't compute a pointer and save it over a
8718	   call.  If lhs is simple, compute it first so we can give it
8719	   as a target if the rhs is just a call.  This avoids an
8720	   extra temp and copy and that prevents a partial-subsumption
8721	   which makes bad code.  Actually we could treat
8722	   component_ref's of vars like vars.  */
8723
8724	tree lhs = TREE_OPERAND (exp, 0);
8725	tree rhs = TREE_OPERAND (exp, 1);
8726
8727	temp = 0;
8728
8729	/* Check for |= or &= of a bitfield of size one into another bitfield
8730	   of size 1.  In this case, (unless we need the result of the
8731	   assignment) we can do this more efficiently with a
8732	   test followed by an assignment, if necessary.
8733
8734	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8735	   things change so we do, this code should be enhanced to
8736	   support it.  */
8737	if (ignore
8738	    && TREE_CODE (lhs) == COMPONENT_REF
8739	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8740		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8741	    && TREE_OPERAND (rhs, 0) == lhs
8742	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8743	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8744	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8745	  {
8746	    rtx label = gen_label_rtx ();
8747
8748	    do_jump (TREE_OPERAND (rhs, 1),
8749		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8750		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8751	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8752					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8753					      ? integer_one_node
8754					      : integer_zero_node)),
8755			       0);
8756	    do_pending_stack_adjust ();
8757	    emit_label (label);
8758	    return const0_rtx;
8759	  }
8760
8761	temp = expand_assignment (lhs, rhs, ! ignore);
8762
8763	return temp;
8764      }
8765
8766    case RETURN_EXPR:
8767      if (!TREE_OPERAND (exp, 0))
8768	expand_null_return ();
8769      else
8770	expand_return (TREE_OPERAND (exp, 0));
8771      return const0_rtx;
8772
8773    case PREINCREMENT_EXPR:
8774    case PREDECREMENT_EXPR:
8775      return expand_increment (exp, 0, ignore);
8776
8777    case POSTINCREMENT_EXPR:
8778    case POSTDECREMENT_EXPR:
8779      /* Faster to treat as pre-increment if result is not used.  */
8780      return expand_increment (exp, ! ignore, ignore);
8781
8782    case ADDR_EXPR:
8783      if (modifier == EXPAND_STACK_PARM)
8784	target = 0;
8785      /* Are we taking the address of a nested function?  */
8786      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8787	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8788	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8789	  && ! TREE_STATIC (exp))
8790	{
8791	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8792	  op0 = force_operand (op0, target);
8793	}
8794      /* If we are taking the address of something erroneous, just
8795	 return a zero.  */
8796      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8797	return const0_rtx;
8798      /* If we are taking the address of a constant and are at the
8799	 top level, we have to use output_constant_def since we can't
8800	 call force_const_mem at top level.  */
8801      else if (cfun == 0
8802	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8803		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8804		       == 'c')))
8805	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8806      else
8807	{
8808	  /* We make sure to pass const0_rtx down if we came in with
8809	     ignore set, to avoid doing the cleanups twice for something.  */
8810	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8811			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8812			     (modifier == EXPAND_INITIALIZER
8813			      ? modifier : EXPAND_CONST_ADDRESS));
8814
8815	  /* If we are going to ignore the result, OP0 will have been set
8816	     to const0_rtx, so just return it.  Don't get confused and
8817	     think we are taking the address of the constant.  */
8818	  if (ignore)
8819	    return op0;
8820
8821	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8822	     clever and returns a REG when given a MEM.  */
8823	  op0 = protect_from_queue (op0, 1);
8824
8825	  /* We would like the object in memory.  If it is a constant, we can
8826	     have it be statically allocated into memory.  For a non-constant,
8827	     we need to allocate some memory and store the value into it.  */
8828
8829	  if (CONSTANT_P (op0))
8830	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8831				   op0);
8832	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8833		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8834		   || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8835	    {
8836	      /* If the operand is a SAVE_EXPR, we can deal with this by
8837		 forcing the SAVE_EXPR into memory.  */
8838	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8839		{
8840		  put_var_into_stack (TREE_OPERAND (exp, 0),
8841				      /*rescan=*/true);
8842		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8843		}
8844	      else
8845		{
8846		  /* If this object is in a register, it can't be BLKmode.  */
8847		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8848		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
8849
8850		  if (GET_CODE (op0) == PARALLEL)
8851		    /* Handle calls that pass values in multiple
8852		       non-contiguous locations.  The Irix 6 ABI has examples
8853		       of this.  */
8854		    emit_group_store (memloc, op0, inner_type,
8855				      int_size_in_bytes (inner_type));
8856		  else
8857		    emit_move_insn (memloc, op0);
8858
8859		  op0 = memloc;
8860		}
8861	    }
8862
8863	  if (GET_CODE (op0) != MEM)
8864	    abort ();
8865
8866	  mark_temp_addr_taken (op0);
8867	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8868	    {
8869	      op0 = XEXP (op0, 0);
8870	      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8871		op0 = convert_memory_address (ptr_mode, op0);
8872	      return op0;
8873	    }
8874
8875	  /* If OP0 is not aligned as least as much as the type requires, we
8876	     need to make a temporary, copy OP0 to it, and take the address of
8877	     the temporary.  We want to use the alignment of the type, not of
8878	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
8879	     the test for BLKmode means that can't happen.  The test for
8880	     BLKmode is because we never make mis-aligned MEMs with
8881	     non-BLKmode.
8882
8883	     We don't need to do this at all if the machine doesn't have
8884	     strict alignment.  */
8885	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8886	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8887		  > MEM_ALIGN (op0))
8888	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8889	    {
8890	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8891	      rtx new;
8892
8893	      if (TYPE_ALIGN_OK (inner_type))
8894		abort ();
8895
8896	      if (TREE_ADDRESSABLE (inner_type))
8897		{
8898		  /* We can't make a bitwise copy of this object, so fail.  */
8899		  error ("cannot take the address of an unaligned member");
8900		  return const0_rtx;
8901		}
8902
8903	      new = assign_stack_temp_for_type
8904		(TYPE_MODE (inner_type),
8905		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8906		 : int_size_in_bytes (inner_type),
8907		 1, build_qualified_type (inner_type,
8908					  (TYPE_QUALS (inner_type)
8909					   | TYPE_QUAL_CONST)));
8910
8911	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8912			       (modifier == EXPAND_STACK_PARM
8913				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8914
8915	      op0 = new;
8916	    }
8917
8918	  op0 = force_operand (XEXP (op0, 0), target);
8919	}
8920
8921      if (flag_force_addr
8922	  && GET_CODE (op0) != REG
8923	  && modifier != EXPAND_CONST_ADDRESS
8924	  && modifier != EXPAND_INITIALIZER
8925	  && modifier != EXPAND_SUM)
8926	op0 = force_reg (Pmode, op0);
8927
8928      if (GET_CODE (op0) == REG
8929	  && ! REG_USERVAR_P (op0))
8930	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8931
8932      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8933	op0 = convert_memory_address (ptr_mode, op0);
8934
8935      return op0;
8936
8937    case ENTRY_VALUE_EXPR:
8938      abort ();
8939
8940    /* COMPLEX type for Extended Pascal & Fortran  */
8941    case COMPLEX_EXPR:
8942      {
8943	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8944	rtx insns;
8945
8946	/* Get the rtx code of the operands.  */
8947	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8948	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8949
8950	if (! target)
8951	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8952
8953	start_sequence ();
8954
8955	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8956	emit_move_insn (gen_realpart (mode, target), op0);
8957	emit_move_insn (gen_imagpart (mode, target), op1);
8958
8959	insns = get_insns ();
8960	end_sequence ();
8961
8962	/* Complex construction should appear as a single unit.  */
8963	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8964	   each with a separate pseudo as destination.
8965	   It's not correct for flow to treat them as a unit.  */
8966	if (GET_CODE (target) != CONCAT)
8967	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8968	else
8969	  emit_insn (insns);
8970
8971	return target;
8972      }
8973
8974    case REALPART_EXPR:
8975      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8976      return gen_realpart (mode, op0);
8977
8978    case IMAGPART_EXPR:
8979      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8980      return gen_imagpart (mode, op0);
8981
8982    case CONJ_EXPR:
8983      {
8984	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8985	rtx imag_t;
8986	rtx insns;
8987
8988	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8989
8990	if (! target)
8991	  target = gen_reg_rtx (mode);
8992
8993	start_sequence ();
8994
8995	/* Store the realpart and the negated imagpart to target.  */
8996	emit_move_insn (gen_realpart (partmode, target),
8997			gen_realpart (partmode, op0));
8998
8999	imag_t = gen_imagpart (partmode, target);
9000	temp = expand_unop (partmode,
9001			    ! unsignedp && flag_trapv
9002			    && (GET_MODE_CLASS(partmode) == MODE_INT)
9003			    ? negv_optab : neg_optab,
9004			    gen_imagpart (partmode, op0), imag_t, 0);
9005	if (temp != imag_t)
9006	  emit_move_insn (imag_t, temp);
9007
9008	insns = get_insns ();
9009	end_sequence ();
9010
9011	/* Conjugate should appear as a single unit
9012	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9013	   each with a separate pseudo as destination.
9014	   It's not correct for flow to treat them as a unit.  */
9015	if (GET_CODE (target) != CONCAT)
9016	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9017	else
9018	  emit_insn (insns);
9019
9020	return target;
9021      }
9022
9023    case TRY_CATCH_EXPR:
9024      {
9025	tree handler = TREE_OPERAND (exp, 1);
9026
9027	expand_eh_region_start ();
9028
9029	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9030
9031	expand_eh_region_end_cleanup (handler);
9032
9033	return op0;
9034      }
9035
9036    case TRY_FINALLY_EXPR:
9037      {
9038	tree try_block = TREE_OPERAND (exp, 0);
9039	tree finally_block = TREE_OPERAND (exp, 1);
9040
9041        if (!optimize || unsafe_for_reeval (finally_block) > 1)
9042	  {
9043	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9044	       is not sufficient, so we cannot expand the block twice.
9045	       So we play games with GOTO_SUBROUTINE_EXPR to let us
9046	       expand the thing only once.  */
9047	    /* When not optimizing, we go ahead with this form since
9048	       (1) user breakpoints operate more predictably without
9049		   code duplication, and
9050	       (2) we're not running any of the global optimizers
9051	           that would explode in time/space with the highly
9052		   connected CFG created by the indirect branching.  */
9053
9054	    rtx finally_label = gen_label_rtx ();
9055	    rtx done_label = gen_label_rtx ();
9056	    rtx return_link = gen_reg_rtx (Pmode);
9057	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9058			          (tree) finally_label, (tree) return_link);
9059	    TREE_SIDE_EFFECTS (cleanup) = 1;
9060
9061	    /* Start a new binding layer that will keep track of all cleanup
9062	       actions to be performed.  */
9063	    expand_start_bindings (2);
9064	    target_temp_slot_level = temp_slot_level;
9065
9066	    expand_decl_cleanup (NULL_TREE, cleanup);
9067	    op0 = expand_expr (try_block, target, tmode, modifier);
9068
9069	    preserve_temp_slots (op0);
9070	    expand_end_bindings (NULL_TREE, 0, 0);
9071	    emit_jump (done_label);
9072	    emit_label (finally_label);
9073	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9074	    emit_indirect_jump (return_link);
9075	    emit_label (done_label);
9076	  }
9077	else
9078	  {
9079	    expand_start_bindings (2);
9080	    target_temp_slot_level = temp_slot_level;
9081
9082	    expand_decl_cleanup (NULL_TREE, finally_block);
9083	    op0 = expand_expr (try_block, target, tmode, modifier);
9084
9085	    preserve_temp_slots (op0);
9086	    expand_end_bindings (NULL_TREE, 0, 0);
9087	  }
9088
9089	return op0;
9090      }
9091
9092    case GOTO_SUBROUTINE_EXPR:
9093      {
9094	rtx subr = (rtx) TREE_OPERAND (exp, 0);
9095	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9096	rtx return_address = gen_label_rtx ();
9097	emit_move_insn (return_link,
9098			gen_rtx_LABEL_REF (Pmode, return_address));
9099	emit_jump (subr);
9100	emit_label (return_address);
9101	return const0_rtx;
9102      }
9103
9104    case VA_ARG_EXPR:
9105      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9106
9107    case EXC_PTR_EXPR:
9108      return get_exception_pointer (cfun);
9109
9110    case FDESC_EXPR:
9111      /* Function descriptors are not valid except for as
9112	 initialization constants, and should not be expanded.  */
9113      abort ();
9114
9115    default:
9116      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9117					alt_rtl);
9118    }
9119
9120  /* Here to do an ordinary binary operator, generating an instruction
9121     from the optab already placed in `this_optab'.  */
9122 binop:
9123  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9124		   subtarget, &op0, &op1, 0);
9125 binop2:
9126  if (modifier == EXPAND_STACK_PARM)
9127    target = 0;
9128  temp = expand_binop (mode, this_optab, op0, op1, target,
9129		       unsignedp, OPTAB_LIB_WIDEN);
9130  if (temp == 0)
9131    abort ();
9132  return temp;
9133}
9134
9135/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9136   when applied to the address of EXP produces an address known to be
9137   aligned more than BIGGEST_ALIGNMENT.  */
9138
9139static int
9140is_aligning_offset (tree offset, tree exp)
9141{
9142  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
9143  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9144	 || TREE_CODE (offset) == NOP_EXPR
9145	 || TREE_CODE (offset) == CONVERT_EXPR
9146	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9147    offset = TREE_OPERAND (offset, 0);
9148
9149  /* We must now have a BIT_AND_EXPR with a constant that is one less than
9150     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9151  if (TREE_CODE (offset) != BIT_AND_EXPR
9152      || !host_integerp (TREE_OPERAND (offset, 1), 1)
9153      || compare_tree_int (TREE_OPERAND (offset, 1),
9154			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9155      || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9156    return 0;
9157
9158  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9159     It must be NEGATE_EXPR.  Then strip any more conversions.  */
9160  offset = TREE_OPERAND (offset, 0);
9161  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9162	 || TREE_CODE (offset) == NOP_EXPR
9163	 || TREE_CODE (offset) == CONVERT_EXPR)
9164    offset = TREE_OPERAND (offset, 0);
9165
9166  if (TREE_CODE (offset) != NEGATE_EXPR)
9167    return 0;
9168
9169  offset = TREE_OPERAND (offset, 0);
9170  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9171	 || TREE_CODE (offset) == NOP_EXPR
9172	 || TREE_CODE (offset) == CONVERT_EXPR)
9173    offset = TREE_OPERAND (offset, 0);
9174
9175  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9176     whose type is the same as EXP.  */
9177  return (TREE_CODE (offset) == ADDR_EXPR
9178	  && (TREE_OPERAND (offset, 0) == exp
9179	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9180		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
9181		      == TREE_TYPE (exp)))));
9182}
9183
9184/* Return the tree node if an ARG corresponds to a string constant or zero
9185   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9186   in bytes within the string that ARG is accessing.  The type of the
9187   offset will be `sizetype'.  */
9188
9189tree
9190string_constant (tree arg, tree *ptr_offset)
9191{
9192  STRIP_NOPS (arg);
9193
9194  if (TREE_CODE (arg) == ADDR_EXPR
9195      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9196    {
9197      *ptr_offset = size_zero_node;
9198      return TREE_OPERAND (arg, 0);
9199    }
9200  else if (TREE_CODE (arg) == PLUS_EXPR)
9201    {
9202      tree arg0 = TREE_OPERAND (arg, 0);
9203      tree arg1 = TREE_OPERAND (arg, 1);
9204
9205      STRIP_NOPS (arg0);
9206      STRIP_NOPS (arg1);
9207
9208      if (TREE_CODE (arg0) == ADDR_EXPR
9209	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9210	{
9211	  *ptr_offset = convert (sizetype, arg1);
9212	  return TREE_OPERAND (arg0, 0);
9213	}
9214      else if (TREE_CODE (arg1) == ADDR_EXPR
9215	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9216	{
9217	  *ptr_offset = convert (sizetype, arg0);
9218	  return TREE_OPERAND (arg1, 0);
9219	}
9220    }
9221
9222  return 0;
9223}
9224
9225/* Expand code for a post- or pre- increment or decrement
9226   and return the RTX for the result.
9227   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
9228
9229static rtx
9230expand_increment (tree exp, int post, int ignore)
9231{
9232  rtx op0, op1;
9233  rtx temp, value;
9234  tree incremented = TREE_OPERAND (exp, 0);
9235  optab this_optab = add_optab;
9236  int icode;
9237  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9238  int op0_is_copy = 0;
9239  int single_insn = 0;
9240  /* 1 means we can't store into OP0 directly,
9241     because it is a subreg narrower than a word,
9242     and we don't dare clobber the rest of the word.  */
9243  int bad_subreg = 0;
9244
9245  /* Stabilize any component ref that might need to be
9246     evaluated more than once below.  */
9247  if (!post
9248      || TREE_CODE (incremented) == BIT_FIELD_REF
9249      || (TREE_CODE (incremented) == COMPONENT_REF
9250	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9251	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9252    incremented = stabilize_reference (incremented);
9253  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
9254     ones into save exprs so that they don't accidentally get evaluated
9255     more than once by the code below.  */
9256  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9257      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9258    incremented = save_expr (incremented);
9259
9260  /* Compute the operands as RTX.
9261     Note whether OP0 is the actual lvalue or a copy of it:
9262     I believe it is a copy iff it is a register or subreg
9263     and insns were generated in computing it.  */
9264
9265  temp = get_last_insn ();
9266  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9267
9268  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9269     in place but instead must do sign- or zero-extension during assignment,
9270     so we copy it into a new register and let the code below use it as
9271     a copy.
9272
9273     Note that we can safely modify this SUBREG since it is know not to be
9274     shared (it was made by the expand_expr call above).  */
9275
9276  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9277    {
9278      if (post)
9279	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9280      else
9281	bad_subreg = 1;
9282    }
9283  else if (GET_CODE (op0) == SUBREG
9284	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9285    {
9286      /* We cannot increment this SUBREG in place.  If we are
9287	 post-incrementing, get a copy of the old value.  Otherwise,
9288	 just mark that we cannot increment in place.  */
9289      if (post)
9290	op0 = copy_to_reg (op0);
9291      else
9292	bad_subreg = 1;
9293    }
9294
9295  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9296		 && temp != get_last_insn ());
9297  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9298
9299  /* Decide whether incrementing or decrementing.  */
9300  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9301      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9302    this_optab = sub_optab;
9303
9304  /* Convert decrement by a constant into a negative increment.  */
9305  if (this_optab == sub_optab
9306      && GET_CODE (op1) == CONST_INT)
9307    {
9308      op1 = GEN_INT (-INTVAL (op1));
9309      this_optab = add_optab;
9310    }
9311
9312  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9313    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9314
9315  /* For a preincrement, see if we can do this with a single instruction.  */
9316  if (!post)
9317    {
9318      icode = (int) this_optab->handlers[(int) mode].insn_code;
9319      if (icode != (int) CODE_FOR_nothing
9320	  /* Make sure that OP0 is valid for operands 0 and 1
9321	     of the insn we want to queue.  */
9322	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9323	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9324	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9325	single_insn = 1;
9326    }
9327
9328  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9329     then we cannot just increment OP0.  We must therefore contrive to
9330     increment the original value.  Then, for postincrement, we can return
9331     OP0 since it is a copy of the old value.  For preincrement, expand here
9332     unless we can do it with a single insn.
9333
9334     Likewise if storing directly into OP0 would clobber high bits
9335     we need to preserve (bad_subreg).  */
9336  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9337    {
9338      /* This is the easiest way to increment the value wherever it is.
9339	 Problems with multiple evaluation of INCREMENTED are prevented
9340	 because either (1) it is a component_ref or preincrement,
9341	 in which case it was stabilized above, or (2) it is an array_ref
9342	 with constant index in an array in a register, which is
9343	 safe to reevaluate.  */
9344      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9345			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9346			    ? MINUS_EXPR : PLUS_EXPR),
9347			   TREE_TYPE (exp),
9348			   incremented,
9349			   TREE_OPERAND (exp, 1));
9350
9351      while (TREE_CODE (incremented) == NOP_EXPR
9352	     || TREE_CODE (incremented) == CONVERT_EXPR)
9353	{
9354	  newexp = convert (TREE_TYPE (incremented), newexp);
9355	  incremented = TREE_OPERAND (incremented, 0);
9356	}
9357
9358      temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9359      return post ? op0 : temp;
9360    }
9361
9362  if (post)
9363    {
9364      /* We have a true reference to the value in OP0.
9365	 If there is an insn to add or subtract in this mode, queue it.
9366	 Queuing the increment insn avoids the register shuffling
9367	 that often results if we must increment now and first save
9368	 the old value for subsequent use.  */
9369
9370#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9371      op0 = stabilize (op0);
9372#endif
9373
9374      icode = (int) this_optab->handlers[(int) mode].insn_code;
9375      if (icode != (int) CODE_FOR_nothing
9376	  /* Make sure that OP0 is valid for operands 0 and 1
9377	     of the insn we want to queue.  */
9378	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9379	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9380	{
9381	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9382	    op1 = force_reg (mode, op1);
9383
9384	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9385	}
9386      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9387	{
9388	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9389		      ? force_reg (Pmode, XEXP (op0, 0))
9390		      : copy_to_reg (XEXP (op0, 0)));
9391	  rtx temp, result;
9392
9393	  op0 = replace_equiv_address (op0, addr);
9394	  temp = force_reg (GET_MODE (op0), op0);
9395	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9396	    op1 = force_reg (mode, op1);
9397
9398	  /* The increment queue is LIFO, thus we have to `queue'
9399	     the instructions in reverse order.  */
9400	  enqueue_insn (op0, gen_move_insn (op0, temp));
9401	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9402	  return result;
9403	}
9404    }
9405
9406  /* Preincrement, or we can't increment with one simple insn.  */
9407  if (post)
9408    /* Save a copy of the value before inc or dec, to return it later.  */
9409    temp = value = copy_to_reg (op0);
9410  else
9411    /* Arrange to return the incremented value.  */
9412    /* Copy the rtx because expand_binop will protect from the queue,
9413       and the results of that would be invalid for us to return
9414       if our caller does emit_queue before using our result.  */
9415    temp = copy_rtx (value = op0);
9416
9417  /* Increment however we can.  */
9418  op1 = expand_binop (mode, this_optab, value, op1, op0,
9419		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9420
9421  /* Make sure the value is stored into OP0.  */
9422  if (op1 != op0)
9423    emit_move_insn (op0, op1);
9424
9425  return temp;
9426}
9427
9428/* Generate code to calculate EXP using a store-flag instruction
9429   and return an rtx for the result.  EXP is either a comparison
9430   or a TRUTH_NOT_EXPR whose operand is a comparison.
9431
9432   If TARGET is nonzero, store the result there if convenient.
9433
9434   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9435   cheap.
9436
9437   Return zero if there is no suitable set-flag instruction
9438   available on this machine.
9439
9440   Once expand_expr has been called on the arguments of the comparison,
9441   we are committed to doing the store flag, since it is not safe to
9442   re-evaluate the expression.  We emit the store-flag insn by calling
9443   emit_store_flag, but only expand the arguments if we have a reason
9444   to believe that emit_store_flag will be successful.  If we think that
9445   it will, but it isn't, we have to simulate the store-flag with a
9446   set/jump/set sequence.  */
9447
9448static rtx
9449do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9450{
9451  enum rtx_code code;
9452  tree arg0, arg1, type;
9453  tree tem;
9454  enum machine_mode operand_mode;
9455  int invert = 0;
9456  int unsignedp;
9457  rtx op0, op1;
9458  enum insn_code icode;
9459  rtx subtarget = target;
9460  rtx result, label;
9461
9462  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9463     result at the end.  We can't simply invert the test since it would
9464     have already been inverted if it were valid.  This case occurs for
9465     some floating-point comparisons.  */
9466
9467  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9468    invert = 1, exp = TREE_OPERAND (exp, 0);
9469
9470  arg0 = TREE_OPERAND (exp, 0);
9471  arg1 = TREE_OPERAND (exp, 1);
9472
9473  /* Don't crash if the comparison was erroneous.  */
9474  if (arg0 == error_mark_node || arg1 == error_mark_node)
9475    return const0_rtx;
9476
9477  type = TREE_TYPE (arg0);
9478  operand_mode = TYPE_MODE (type);
9479  unsignedp = TREE_UNSIGNED (type);
9480
9481  /* We won't bother with BLKmode store-flag operations because it would mean
9482     passing a lot of information to emit_store_flag.  */
9483  if (operand_mode == BLKmode)
9484    return 0;
9485
9486  /* We won't bother with store-flag operations involving function pointers
9487     when function pointers must be canonicalized before comparisons.  */
9488#ifdef HAVE_canonicalize_funcptr_for_compare
9489  if (HAVE_canonicalize_funcptr_for_compare
9490      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9491	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9492	       == FUNCTION_TYPE))
9493	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9494	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9495		  == FUNCTION_TYPE))))
9496    return 0;
9497#endif
9498
9499  STRIP_NOPS (arg0);
9500  STRIP_NOPS (arg1);
9501
9502  /* Get the rtx comparison code to use.  We know that EXP is a comparison
9503     operation of some type.  Some comparisons against 1 and -1 can be
9504     converted to comparisons with zero.  Do so here so that the tests
9505     below will be aware that we have a comparison with zero.   These
9506     tests will not catch constants in the first operand, but constants
9507     are rarely passed as the first operand.  */
9508
9509  switch (TREE_CODE (exp))
9510    {
9511    case EQ_EXPR:
9512      code = EQ;
9513      break;
9514    case NE_EXPR:
9515      code = NE;
9516      break;
9517    case LT_EXPR:
9518      if (integer_onep (arg1))
9519	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9520      else
9521	code = unsignedp ? LTU : LT;
9522      break;
9523    case LE_EXPR:
9524      if (! unsignedp && integer_all_onesp (arg1))
9525	arg1 = integer_zero_node, code = LT;
9526      else
9527	code = unsignedp ? LEU : LE;
9528      break;
9529    case GT_EXPR:
9530      if (! unsignedp && integer_all_onesp (arg1))
9531	arg1 = integer_zero_node, code = GE;
9532      else
9533	code = unsignedp ? GTU : GT;
9534      break;
9535    case GE_EXPR:
9536      if (integer_onep (arg1))
9537	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9538      else
9539	code = unsignedp ? GEU : GE;
9540      break;
9541
9542    case UNORDERED_EXPR:
9543      code = UNORDERED;
9544      break;
9545    case ORDERED_EXPR:
9546      code = ORDERED;
9547      break;
9548    case UNLT_EXPR:
9549      code = UNLT;
9550      break;
9551    case UNLE_EXPR:
9552      code = UNLE;
9553      break;
9554    case UNGT_EXPR:
9555      code = UNGT;
9556      break;
9557    case UNGE_EXPR:
9558      code = UNGE;
9559      break;
9560    case UNEQ_EXPR:
9561      code = UNEQ;
9562      break;
9563
9564    default:
9565      abort ();
9566    }
9567
9568  /* Put a constant second.  */
9569  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9570    {
9571      tem = arg0; arg0 = arg1; arg1 = tem;
9572      code = swap_condition (code);
9573    }
9574
9575  /* If this is an equality or inequality test of a single bit, we can
9576     do this by shifting the bit being tested to the low-order bit and
9577     masking the result with the constant 1.  If the condition was EQ,
9578     we xor it with 1.  This does not require an scc insn and is faster
9579     than an scc insn even if we have it.
9580
9581     The code to make this transformation was moved into fold_single_bit_test,
9582     so we just call into the folder and expand its result.  */
9583
9584  if ((code == NE || code == EQ)
9585      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9586      && integer_pow2p (TREE_OPERAND (arg0, 1)))
9587    {
9588      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9589      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9590						arg0, arg1, type),
9591			  target, VOIDmode, EXPAND_NORMAL);
9592    }
9593
9594  /* Now see if we are likely to be able to do this.  Return if not.  */
9595  if (! can_compare_p (code, operand_mode, ccp_store_flag))
9596    return 0;
9597
9598  icode = setcc_gen_code[(int) code];
9599  if (icode == CODE_FOR_nothing
9600      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9601    {
9602      /* We can only do this if it is one of the special cases that
9603	 can be handled without an scc insn.  */
9604      if ((code == LT && integer_zerop (arg1))
9605	  || (! only_cheap && code == GE && integer_zerop (arg1)))
9606	;
9607      else if (BRANCH_COST >= 0
9608	       && ! only_cheap && (code == NE || code == EQ)
9609	       && TREE_CODE (type) != REAL_TYPE
9610	       && ((abs_optab->handlers[(int) operand_mode].insn_code
9611		    != CODE_FOR_nothing)
9612		   || (ffs_optab->handlers[(int) operand_mode].insn_code
9613		       != CODE_FOR_nothing)))
9614	;
9615      else
9616	return 0;
9617    }
9618
9619  if (! get_subtarget (target)
9620      || GET_MODE (subtarget) != operand_mode)
9621    subtarget = 0;
9622
9623  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9624
9625  if (target == 0)
9626    target = gen_reg_rtx (mode);
9627
9628  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
9629     because, if the emit_store_flag does anything it will succeed and
9630     OP0 and OP1 will not be used subsequently.  */
9631
9632  result = emit_store_flag (target, code,
9633			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9634			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9635			    operand_mode, unsignedp, 1);
9636
9637  if (result)
9638    {
9639      if (invert)
9640	result = expand_binop (mode, xor_optab, result, const1_rtx,
9641			       result, 0, OPTAB_LIB_WIDEN);
9642      return result;
9643    }
9644
9645  /* If this failed, we have to do this with set/compare/jump/set code.  */
9646  if (GET_CODE (target) != REG
9647      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9648    target = gen_reg_rtx (GET_MODE (target));
9649
9650  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9651  result = compare_from_rtx (op0, op1, code, unsignedp,
9652			     operand_mode, NULL_RTX);
9653  if (GET_CODE (result) == CONST_INT)
9654    return (((result == const0_rtx && ! invert)
9655	     || (result != const0_rtx && invert))
9656	    ? const0_rtx : const1_rtx);
9657
9658  /* The code of RESULT may not match CODE if compare_from_rtx
9659     decided to swap its operands and reverse the original code.
9660
9661     We know that compare_from_rtx returns either a CONST_INT or
9662     a new comparison code, so it is safe to just extract the
9663     code from RESULT.  */
9664  code = GET_CODE (result);
9665
9666  label = gen_label_rtx ();
9667  if (bcc_gen_fctn[(int) code] == 0)
9668    abort ();
9669
9670  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9671  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9672  emit_label (label);
9673
9674  return target;
9675}
9676
9677
9678/* Stubs in case we haven't got a casesi insn.  */
9679#ifndef HAVE_casesi
9680# define HAVE_casesi 0
9681# define gen_casesi(a, b, c, d, e) (0)
9682# define CODE_FOR_casesi CODE_FOR_nothing
9683#endif
9684
9685/* If the machine does not have a case insn that compares the bounds,
9686   this means extra overhead for dispatch tables, which raises the
9687   threshold for using them.  */
9688#ifndef CASE_VALUES_THRESHOLD
9689#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9690#endif /* CASE_VALUES_THRESHOLD */
9691
9692unsigned int
9693case_values_threshold (void)
9694{
9695  return CASE_VALUES_THRESHOLD;
9696}
9697
9698/* Attempt to generate a casesi instruction.  Returns 1 if successful,
9699   0 otherwise (i.e. if there is no casesi instruction).  */
9700int
9701try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9702	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9703{
9704  enum machine_mode index_mode = SImode;
9705  int index_bits = GET_MODE_BITSIZE (index_mode);
9706  rtx op1, op2, index;
9707  enum machine_mode op_mode;
9708
9709  if (! HAVE_casesi)
9710    return 0;
9711
9712  /* Convert the index to SImode.  */
9713  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9714    {
9715      enum machine_mode omode = TYPE_MODE (index_type);
9716      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9717
9718      /* We must handle the endpoints in the original mode.  */
9719      index_expr = build (MINUS_EXPR, index_type,
9720			  index_expr, minval);
9721      minval = integer_zero_node;
9722      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9723      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9724			       omode, 1, default_label);
9725      /* Now we can safely truncate.  */
9726      index = convert_to_mode (index_mode, index, 0);
9727    }
9728  else
9729    {
9730      if (TYPE_MODE (index_type) != index_mode)
9731	{
9732	  index_expr = convert ((*lang_hooks.types.type_for_size)
9733				(index_bits, 0), index_expr);
9734	  index_type = TREE_TYPE (index_expr);
9735	}
9736
9737      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9738    }
9739  emit_queue ();
9740  index = protect_from_queue (index, 0);
9741  do_pending_stack_adjust ();
9742
9743  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9744  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9745      (index, op_mode))
9746    index = copy_to_mode_reg (op_mode, index);
9747
9748  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9749
9750  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9751  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9752		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9753  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9754      (op1, op_mode))
9755    op1 = copy_to_mode_reg (op_mode, op1);
9756
9757  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9758
9759  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9760  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9761		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
9762  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9763      (op2, op_mode))
9764    op2 = copy_to_mode_reg (op_mode, op2);
9765
9766  emit_jump_insn (gen_casesi (index, op1, op2,
9767			      table_label, default_label));
9768  return 1;
9769}
9770
9771/* Attempt to generate a tablejump instruction; same concept.  */
9772#ifndef HAVE_tablejump
9773#define HAVE_tablejump 0
9774#define gen_tablejump(x, y) (0)
9775#endif
9776
9777/* Subroutine of the next function.
9778
9779   INDEX is the value being switched on, with the lowest value
9780   in the table already subtracted.
9781   MODE is its expected mode (needed if INDEX is constant).
9782   RANGE is the length of the jump table.
9783   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9784
9785   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9786   index value is out of range.  */
9787
9788static void
9789do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9790	      rtx default_label)
9791{
9792  rtx temp, vector;
9793
9794  if (INTVAL (range) > cfun->max_jumptable_ents)
9795    cfun->max_jumptable_ents = INTVAL (range);
9796
9797  /* Do an unsigned comparison (in the proper mode) between the index
9798     expression and the value which represents the length of the range.
9799     Since we just finished subtracting the lower bound of the range
9800     from the index expression, this comparison allows us to simultaneously
9801     check that the original index expression value is both greater than
9802     or equal to the minimum value of the range and less than or equal to
9803     the maximum value of the range.  */
9804
9805  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9806			   default_label);
9807
9808  /* If index is in range, it must fit in Pmode.
9809     Convert to Pmode so we can index with it.  */
9810  if (mode != Pmode)
9811    index = convert_to_mode (Pmode, index, 1);
9812
9813  /* Don't let a MEM slip through, because then INDEX that comes
9814     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9815     and break_out_memory_refs will go to work on it and mess it up.  */
9816#ifdef PIC_CASE_VECTOR_ADDRESS
9817  if (flag_pic && GET_CODE (index) != REG)
9818    index = copy_to_mode_reg (Pmode, index);
9819#endif
9820
9821  /* If flag_force_addr were to affect this address
9822     it could interfere with the tricky assumptions made
9823     about addresses that contain label-refs,
9824     which may be valid only very near the tablejump itself.  */
9825  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9826     GET_MODE_SIZE, because this indicates how large insns are.  The other
9827     uses should all be Pmode, because they are addresses.  This code
9828     could fail if addresses and insns are not the same size.  */
9829  index = gen_rtx_PLUS (Pmode,
9830			gen_rtx_MULT (Pmode, index,
9831				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9832			gen_rtx_LABEL_REF (Pmode, table_label));
9833#ifdef PIC_CASE_VECTOR_ADDRESS
9834  if (flag_pic)
9835    index = PIC_CASE_VECTOR_ADDRESS (index);
9836  else
9837#endif
9838    index = memory_address_noforce (CASE_VECTOR_MODE, index);
9839  temp = gen_reg_rtx (CASE_VECTOR_MODE);
9840  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9841  RTX_UNCHANGING_P (vector) = 1;
9842  MEM_NOTRAP_P (vector) = 1;
9843  convert_move (temp, vector, 0);
9844
9845  emit_jump_insn (gen_tablejump (temp, table_label));
9846
9847  /* If we are generating PIC code or if the table is PC-relative, the
9848     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
9849  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9850    emit_barrier ();
9851}
9852
9853int
9854try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9855	       rtx table_label, rtx default_label)
9856{
9857  rtx index;
9858
9859  if (! HAVE_tablejump)
9860    return 0;
9861
9862  index_expr = fold (build (MINUS_EXPR, index_type,
9863			    convert (index_type, index_expr),
9864			    convert (index_type, minval)));
9865  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9866  emit_queue ();
9867  index = protect_from_queue (index, 0);
9868  do_pending_stack_adjust ();
9869
9870  do_tablejump (index, TYPE_MODE (index_type),
9871		convert_modes (TYPE_MODE (index_type),
9872			       TYPE_MODE (TREE_TYPE (range)),
9873			       expand_expr (range, NULL_RTX,
9874					    VOIDmode, 0),
9875			       TREE_UNSIGNED (TREE_TYPE (range))),
9876		table_label, default_label);
9877  return 1;
9878}
9879
9880/* Nonzero if the mode is a valid vector mode for this architecture.
9881   This returns nonzero even if there is no hardware support for the
9882   vector mode, but we can emulate with narrower modes.  */
9883
9884int
9885vector_mode_valid_p (enum machine_mode mode)
9886{
9887  enum mode_class class = GET_MODE_CLASS (mode);
9888  enum machine_mode innermode;
9889
9890  /* Doh!  What's going on?  */
9891  if (class != MODE_VECTOR_INT
9892      && class != MODE_VECTOR_FLOAT)
9893    return 0;
9894
9895  /* Hardware support.  Woo hoo!  */
9896  if (VECTOR_MODE_SUPPORTED_P (mode))
9897    return 1;
9898
9899  innermode = GET_MODE_INNER (mode);
9900
9901  /* We should probably return 1 if requesting V4DI and we have no DI,
9902     but we have V2DI, but this is probably very unlikely.  */
9903
9904  /* If we have support for the inner mode, we can safely emulate it.
9905     We may not have V2DI, but me can emulate with a pair of DIs.  */
9906  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9907}
9908
9909/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
9910static rtx
9911const_vector_from_tree (tree exp)
9912{
9913  rtvec v;
9914  int units, i;
9915  tree link, elt;
9916  enum machine_mode inner, mode;
9917
9918  mode = TYPE_MODE (TREE_TYPE (exp));
9919
9920  if (is_zeros_p (exp))
9921    return CONST0_RTX (mode);
9922
9923  units = GET_MODE_NUNITS (mode);
9924  inner = GET_MODE_INNER (mode);
9925
9926  v = rtvec_alloc (units);
9927
9928  link = TREE_VECTOR_CST_ELTS (exp);
9929  for (i = 0; link; link = TREE_CHAIN (link), ++i)
9930    {
9931      elt = TREE_VALUE (link);
9932
9933      if (TREE_CODE (elt) == REAL_CST)
9934	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9935							 inner);
9936      else
9937	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9938					       TREE_INT_CST_HIGH (elt),
9939					       inner);
9940    }
9941
9942  /* Initialize remaining elements to 0.  */
9943  for (; i < units; ++i)
9944    RTVEC_ELT (v, i) = CONST0_RTX (inner);
9945
9946  return gen_rtx_raw_CONST_VECTOR (mode, v);
9947}
9948
9949#include "gt-expr.h"
9950