expr.c revision 119256
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "real.h"
26#include "rtl.h"
27#include "tree.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should if the stack and args grow in opposite directions, but
53   only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#ifndef PUSH_ARGS_REVERSED
58#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
60#endif
61#endif
62
63#endif
64
65#ifndef STACK_PUSH_CODE
66#ifdef STACK_GROWS_DOWNWARD
67#define STACK_PUSH_CODE PRE_DEC
68#else
69#define STACK_PUSH_CODE PRE_INC
70#endif
71#endif
72
73/* Assume that case vectors are not pc-relative.  */
74#ifndef CASE_VECTOR_PC_RELATIVE
75#define CASE_VECTOR_PC_RELATIVE 0
76#endif
77
78/* Convert defined/undefined to boolean.  */
79#ifdef TARGET_MEM_FUNCTIONS
80#undef TARGET_MEM_FUNCTIONS
81#define TARGET_MEM_FUNCTIONS 1
82#else
83#define TARGET_MEM_FUNCTIONS 0
84#endif
85
86
87/* If this is nonzero, we do not bother generating VOLATILE
88   around volatile memory references, and we are willing to
89   output indirect addresses.  If cse is to follow, we reject
90   indirect addresses so a useful potential cse is generated;
91   if it is used only once, instruction combination will produce
92   the same indirect address eventually.  */
93int cse_not_expected;
94
95/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
96static tree placeholder_list = 0;
97
98/* This structure is used by move_by_pieces to describe the move to
99   be performed.  */
100struct move_by_pieces
101{
102  rtx to;
103  rtx to_addr;
104  int autinc_to;
105  int explicit_inc_to;
106  rtx from;
107  rtx from_addr;
108  int autinc_from;
109  int explicit_inc_from;
110  unsigned HOST_WIDE_INT len;
111  HOST_WIDE_INT offset;
112  int reverse;
113};
114
115/* This structure is used by store_by_pieces to describe the clear to
116   be performed.  */
117
118struct store_by_pieces
119{
120  rtx to;
121  rtx to_addr;
122  int autinc_to;
123  int explicit_inc_to;
124  unsigned HOST_WIDE_INT len;
125  HOST_WIDE_INT offset;
126  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
127  PTR constfundata;
128  int reverse;
129};
130
131static rtx enqueue_insn		PARAMS ((rtx, rtx));
132static unsigned HOST_WIDE_INT move_by_pieces_ninsns
133				PARAMS ((unsigned HOST_WIDE_INT,
134					 unsigned int));
135static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
136					 struct move_by_pieces *));
137static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
138static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
139static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
140static tree emit_block_move_libcall_fn PARAMS ((int));
141static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
142static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
143					 enum machine_mode));
144static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
145					 unsigned int));
146static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
147					 unsigned int));
148static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
149					 enum machine_mode,
150					 struct store_by_pieces *));
151static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
152static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
153static tree clear_storage_libcall_fn PARAMS ((int));
154static rtx compress_float_constant PARAMS ((rtx, rtx));
155static rtx get_subtarget	PARAMS ((rtx));
156static int is_zeros_p         PARAMS ((tree));
157static int mostly_zeros_p	PARAMS ((tree));
158static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159					     HOST_WIDE_INT, enum machine_mode,
160					     tree, tree, int, int));
161static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
162static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
163					 HOST_WIDE_INT, enum machine_mode,
164					 tree, enum machine_mode, int, tree,
165					 int));
166static rtx var_rtx		PARAMS ((tree));
167static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
169static int is_aligning_offset	PARAMS ((tree, tree));
170static rtx expand_increment	PARAMS ((tree, int, int));
171static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
172static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
173static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
174					 rtx, rtx));
175static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
176#ifdef PUSH_ROUNDING
177static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
178#endif
179static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
180static rtx const_vector_from_tree PARAMS ((tree));
181
182/* Record for each mode whether we can move a register directly to or
183   from an object of that mode in memory.  If we can't, we won't try
184   to use that mode directly when accessing a field of that mode.  */
185
186static char direct_load[NUM_MACHINE_MODES];
187static char direct_store[NUM_MACHINE_MODES];
188
189/* Record for each mode whether we can float-extend from memory.  */
190
191static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
192
193/* If a memory-to-memory move would take MOVE_RATIO or more simple
194   move-instruction sequences, we will do a movstr or libcall instead.  */
195
196#ifndef MOVE_RATIO
197#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
198#define MOVE_RATIO 2
199#else
200/* If we are optimizing for space (-Os), cut down the default move ratio.  */
201#define MOVE_RATIO (optimize_size ? 3 : 15)
202#endif
203#endif
204
205/* This macro is used to determine whether move_by_pieces should be called
206   to perform a structure copy.  */
207#ifndef MOVE_BY_PIECES_P
208#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
209  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
210#endif
211
212/* If a clear memory operation would take CLEAR_RATIO or more simple
213   move-instruction sequences, we will do a clrstr or libcall instead.  */
214
215#ifndef CLEAR_RATIO
216#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
217#define CLEAR_RATIO 2
218#else
219/* If we are optimizing for space, cut down the default clear ratio.  */
220#define CLEAR_RATIO (optimize_size ? 3 : 15)
221#endif
222#endif
223
224/* This macro is used to determine whether clear_by_pieces should be
225   called to clear storage.  */
226#ifndef CLEAR_BY_PIECES_P
227#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
228  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
229#endif
230
231/* This array records the insn_code of insns to perform block moves.  */
232enum insn_code movstr_optab[NUM_MACHINE_MODES];
233
234/* This array records the insn_code of insns to perform block clears.  */
235enum insn_code clrstr_optab[NUM_MACHINE_MODES];
236
237/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
238
239#ifndef SLOW_UNALIGNED_ACCESS
240#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
241#endif
242
243/* This is run once per compilation to set up which modes can be used
244   directly in memory and to initialize the block move optab.  */
245
246void
247init_expr_once ()
248{
249  rtx insn, pat;
250  enum machine_mode mode;
251  int num_clobbers;
252  rtx mem, mem1;
253  rtx reg;
254
255  /* Try indexing by frame ptr and try by stack ptr.
256     It is known that on the Convex the stack ptr isn't a valid index.
257     With luck, one or the other is valid on any machine.  */
258  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
259  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
260
261  /* A scratch register we can modify in-place below to avoid
262     useless RTL allocations.  */
263  reg = gen_rtx_REG (VOIDmode, -1);
264
265  insn = rtx_alloc (INSN);
266  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
267  PATTERN (insn) = pat;
268
269  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
270       mode = (enum machine_mode) ((int) mode + 1))
271    {
272      int regno;
273
274      direct_load[(int) mode] = direct_store[(int) mode] = 0;
275      PUT_MODE (mem, mode);
276      PUT_MODE (mem1, mode);
277      PUT_MODE (reg, mode);
278
279      /* See if there is some register that can be used in this mode and
280	 directly loaded or stored from memory.  */
281
282      if (mode != VOIDmode && mode != BLKmode)
283	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
284	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
285	     regno++)
286	  {
287	    if (! HARD_REGNO_MODE_OK (regno, mode))
288	      continue;
289
290	    REGNO (reg) = regno;
291
292	    SET_SRC (pat) = mem;
293	    SET_DEST (pat) = reg;
294	    if (recog (pat, insn, &num_clobbers) >= 0)
295	      direct_load[(int) mode] = 1;
296
297	    SET_SRC (pat) = mem1;
298	    SET_DEST (pat) = reg;
299	    if (recog (pat, insn, &num_clobbers) >= 0)
300	      direct_load[(int) mode] = 1;
301
302	    SET_SRC (pat) = reg;
303	    SET_DEST (pat) = mem;
304	    if (recog (pat, insn, &num_clobbers) >= 0)
305	      direct_store[(int) mode] = 1;
306
307	    SET_SRC (pat) = reg;
308	    SET_DEST (pat) = mem1;
309	    if (recog (pat, insn, &num_clobbers) >= 0)
310	      direct_store[(int) mode] = 1;
311	  }
312    }
313
314  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
315
316  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
317       mode = GET_MODE_WIDER_MODE (mode))
318    {
319      enum machine_mode srcmode;
320      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
321	   srcmode = GET_MODE_WIDER_MODE (srcmode))
322	{
323	  enum insn_code ic;
324
325	  ic = can_extend_p (mode, srcmode, 0);
326	  if (ic == CODE_FOR_nothing)
327	    continue;
328
329	  PUT_MODE (mem, srcmode);
330
331	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
332	    float_extend_from_mem[mode][srcmode] = true;
333	}
334    }
335}
336
337/* This is run at the start of compiling a function.  */
338
339void
340init_expr ()
341{
342  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
343
344  pending_chain = 0;
345  pending_stack_adjust = 0;
346  stack_pointer_delta = 0;
347  inhibit_defer_pop = 0;
348  saveregs_value = 0;
349  apply_args_value = 0;
350  forced_labels = 0;
351}
352
353/* Small sanity check that the queue is empty at the end of a function.  */
354
355void
356finish_expr_for_function ()
357{
358  if (pending_chain)
359    abort ();
360}
361
362/* Manage the queue of increment instructions to be output
363   for POSTINCREMENT_EXPR expressions, etc.  */
364
365/* Queue up to increment (or change) VAR later.  BODY says how:
366   BODY should be the same thing you would pass to emit_insn
367   to increment right away.  It will go to emit_insn later on.
368
369   The value is a QUEUED expression to be used in place of VAR
370   where you want to guarantee the pre-incrementation value of VAR.  */
371
372static rtx
373enqueue_insn (var, body)
374     rtx var, body;
375{
376  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
377				  body, pending_chain);
378  return pending_chain;
379}
380
381/* Use protect_from_queue to convert a QUEUED expression
382   into something that you can put immediately into an instruction.
383   If the queued incrementation has not happened yet,
384   protect_from_queue returns the variable itself.
385   If the incrementation has happened, protect_from_queue returns a temp
386   that contains a copy of the old value of the variable.
387
388   Any time an rtx which might possibly be a QUEUED is to be put
389   into an instruction, it must be passed through protect_from_queue first.
390   QUEUED expressions are not meaningful in instructions.
391
392   Do not pass a value through protect_from_queue and then hold
393   on to it for a while before putting it in an instruction!
394   If the queue is flushed in between, incorrect code will result.  */
395
396rtx
397protect_from_queue (x, modify)
398     rtx x;
399     int modify;
400{
401  RTX_CODE code = GET_CODE (x);
402
403#if 0  /* A QUEUED can hang around after the queue is forced out.  */
404  /* Shortcut for most common case.  */
405  if (pending_chain == 0)
406    return x;
407#endif
408
409  if (code != QUEUED)
410    {
411      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
412	 use of autoincrement.  Make a copy of the contents of the memory
413	 location rather than a copy of the address, but not if the value is
414	 of mode BLKmode.  Don't modify X in place since it might be
415	 shared.  */
416      if (code == MEM && GET_MODE (x) != BLKmode
417	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
418	{
419	  rtx y = XEXP (x, 0);
420	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
421
422	  if (QUEUED_INSN (y))
423	    {
424	      rtx temp = gen_reg_rtx (GET_MODE (x));
425
426	      emit_insn_before (gen_move_insn (temp, new),
427				QUEUED_INSN (y));
428	      return temp;
429	    }
430
431	  /* Copy the address into a pseudo, so that the returned value
432	     remains correct across calls to emit_queue.  */
433	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
434	}
435
436      /* Otherwise, recursively protect the subexpressions of all
437	 the kinds of rtx's that can contain a QUEUED.  */
438      if (code == MEM)
439	{
440	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
441	  if (tem != XEXP (x, 0))
442	    {
443	      x = copy_rtx (x);
444	      XEXP (x, 0) = tem;
445	    }
446	}
447      else if (code == PLUS || code == MULT)
448	{
449	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
450	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
451	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
452	    {
453	      x = copy_rtx (x);
454	      XEXP (x, 0) = new0;
455	      XEXP (x, 1) = new1;
456	    }
457	}
458      return x;
459    }
460  /* If the increment has not happened, use the variable itself.  Copy it
461     into a new pseudo so that the value remains correct across calls to
462     emit_queue.  */
463  if (QUEUED_INSN (x) == 0)
464    return copy_to_reg (QUEUED_VAR (x));
465  /* If the increment has happened and a pre-increment copy exists,
466     use that copy.  */
467  if (QUEUED_COPY (x) != 0)
468    return QUEUED_COPY (x);
469  /* The increment has happened but we haven't set up a pre-increment copy.
470     Set one up now, and use it.  */
471  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
472  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
473		    QUEUED_INSN (x));
474  return QUEUED_COPY (x);
475}
476
477/* Return nonzero if X contains a QUEUED expression:
478   if it contains anything that will be altered by a queued increment.
479   We handle only combinations of MEM, PLUS, MINUS and MULT operators
480   since memory addresses generally contain only those.  */
481
482int
483queued_subexp_p (x)
484     rtx x;
485{
486  enum rtx_code code = GET_CODE (x);
487  switch (code)
488    {
489    case QUEUED:
490      return 1;
491    case MEM:
492      return queued_subexp_p (XEXP (x, 0));
493    case MULT:
494    case PLUS:
495    case MINUS:
496      return (queued_subexp_p (XEXP (x, 0))
497	      || queued_subexp_p (XEXP (x, 1)));
498    default:
499      return 0;
500    }
501}
502
503/* Perform all the pending incrementations.  */
504
505void
506emit_queue ()
507{
508  rtx p;
509  while ((p = pending_chain))
510    {
511      rtx body = QUEUED_BODY (p);
512
513      switch (GET_CODE (body))
514	{
515	case INSN:
516	case JUMP_INSN:
517	case CALL_INSN:
518	case CODE_LABEL:
519	case BARRIER:
520	case NOTE:
521	  QUEUED_INSN (p) = body;
522	  emit_insn (body);
523	  break;
524
525#ifdef ENABLE_CHECKING
526	case SEQUENCE:
527	  abort ();
528	  break;
529#endif
530
531	default:
532	  QUEUED_INSN (p) = emit_insn (body);
533	  break;
534	}
535
536      pending_chain = QUEUED_NEXT (p);
537    }
538}
539
540/* Copy data from FROM to TO, where the machine modes are not the same.
541   Both modes may be integer, or both may be floating.
542   UNSIGNEDP should be nonzero if FROM is an unsigned type.
543   This causes zero-extension instead of sign-extension.  */
544
545void
546convert_move (to, from, unsignedp)
547     rtx to, from;
548     int unsignedp;
549{
550  enum machine_mode to_mode = GET_MODE (to);
551  enum machine_mode from_mode = GET_MODE (from);
552  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
553  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
554  enum insn_code code;
555  rtx libcall;
556
557  /* rtx code for making an equivalent value.  */
558  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
559			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
560
561  to = protect_from_queue (to, 1);
562  from = protect_from_queue (from, 0);
563
564  if (to_real != from_real)
565    abort ();
566
567  /* If FROM is a SUBREG that indicates that we have already done at least
568     the required extension, strip it.  We don't handle such SUBREGs as
569     TO here.  */
570
571  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
572      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
573	  >= GET_MODE_SIZE (to_mode))
574      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
575    from = gen_lowpart (to_mode, from), from_mode = to_mode;
576
577  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
578    abort ();
579
580  if (to_mode == from_mode
581      || (from_mode == VOIDmode && CONSTANT_P (from)))
582    {
583      emit_move_insn (to, from);
584      return;
585    }
586
587  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
588    {
589      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
590	abort ();
591
592      if (VECTOR_MODE_P (to_mode))
593	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
594      else
595	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
596
597      emit_move_insn (to, from);
598      return;
599    }
600
601  if (to_real != from_real)
602    abort ();
603
604  if (to_real)
605    {
606      rtx value, insns;
607
608      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609	{
610	  /* Try converting directly if the insn is supported.  */
611	  if ((code = can_extend_p (to_mode, from_mode, 0))
612	      != CODE_FOR_nothing)
613	    {
614	      emit_unop_insn (code, to, from, UNKNOWN);
615	      return;
616	    }
617	}
618
619#ifdef HAVE_trunchfqf2
620      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
621	{
622	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
623	  return;
624	}
625#endif
626#ifdef HAVE_trunctqfqf2
627      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
628	{
629	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
630	  return;
631	}
632#endif
633#ifdef HAVE_truncsfqf2
634      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635	{
636	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637	  return;
638	}
639#endif
640#ifdef HAVE_truncdfqf2
641      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642	{
643	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644	  return;
645	}
646#endif
647#ifdef HAVE_truncxfqf2
648      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649	{
650	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651	  return;
652	}
653#endif
654#ifdef HAVE_trunctfqf2
655      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656	{
657	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658	  return;
659	}
660#endif
661
662#ifdef HAVE_trunctqfhf2
663      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664	{
665	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666	  return;
667	}
668#endif
669#ifdef HAVE_truncsfhf2
670      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671	{
672	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673	  return;
674	}
675#endif
676#ifdef HAVE_truncdfhf2
677      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678	{
679	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680	  return;
681	}
682#endif
683#ifdef HAVE_truncxfhf2
684      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685	{
686	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687	  return;
688	}
689#endif
690#ifdef HAVE_trunctfhf2
691      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692	{
693	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694	  return;
695	}
696#endif
697
698#ifdef HAVE_truncsftqf2
699      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700	{
701	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702	  return;
703	}
704#endif
705#ifdef HAVE_truncdftqf2
706      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707	{
708	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709	  return;
710	}
711#endif
712#ifdef HAVE_truncxftqf2
713      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714	{
715	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716	  return;
717	}
718#endif
719#ifdef HAVE_trunctftqf2
720      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721	{
722	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723	  return;
724	}
725#endif
726
727#ifdef HAVE_truncdfsf2
728      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729	{
730	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731	  return;
732	}
733#endif
734#ifdef HAVE_truncxfsf2
735      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736	{
737	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738	  return;
739	}
740#endif
741#ifdef HAVE_trunctfsf2
742      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743	{
744	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745	  return;
746	}
747#endif
748#ifdef HAVE_truncxfdf2
749      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750	{
751	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752	  return;
753	}
754#endif
755#ifdef HAVE_trunctfdf2
756      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757	{
758	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759	  return;
760	}
761#endif
762
763      libcall = (rtx) 0;
764      switch (from_mode)
765	{
766	case SFmode:
767	  switch (to_mode)
768	    {
769	    case DFmode:
770	      libcall = extendsfdf2_libfunc;
771	      break;
772
773	    case XFmode:
774	      libcall = extendsfxf2_libfunc;
775	      break;
776
777	    case TFmode:
778	      libcall = extendsftf2_libfunc;
779	      break;
780
781	    default:
782	      break;
783	    }
784	  break;
785
786	case DFmode:
787	  switch (to_mode)
788	    {
789	    case SFmode:
790	      libcall = truncdfsf2_libfunc;
791	      break;
792
793	    case XFmode:
794	      libcall = extenddfxf2_libfunc;
795	      break;
796
797	    case TFmode:
798	      libcall = extenddftf2_libfunc;
799	      break;
800
801	    default:
802	      break;
803	    }
804	  break;
805
806	case XFmode:
807	  switch (to_mode)
808	    {
809	    case SFmode:
810	      libcall = truncxfsf2_libfunc;
811	      break;
812
813	    case DFmode:
814	      libcall = truncxfdf2_libfunc;
815	      break;
816
817	    default:
818	      break;
819	    }
820	  break;
821
822	case TFmode:
823	  switch (to_mode)
824	    {
825	    case SFmode:
826	      libcall = trunctfsf2_libfunc;
827	      break;
828
829	    case DFmode:
830	      libcall = trunctfdf2_libfunc;
831	      break;
832
833	    default:
834	      break;
835	    }
836	  break;
837
838	default:
839	  break;
840	}
841
842      if (libcall == (rtx) 0)
843	/* This conversion is not implemented yet.  */
844	abort ();
845
846      start_sequence ();
847      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
848				       1, from, from_mode);
849      insns = get_insns ();
850      end_sequence ();
851      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
852								    from));
853      return;
854    }
855
856  /* Now both modes are integers.  */
857
858  /* Handle expanding beyond a word.  */
859  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
860      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
861    {
862      rtx insns;
863      rtx lowpart;
864      rtx fill_value;
865      rtx lowfrom;
866      int i;
867      enum machine_mode lowpart_mode;
868      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
869
870      /* Try converting directly if the insn is supported.  */
871      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
872	  != CODE_FOR_nothing)
873	{
874	  /* If FROM is a SUBREG, put it into a register.  Do this
875	     so that we always generate the same set of insns for
876	     better cse'ing; if an intermediate assignment occurred,
877	     we won't be doing the operation directly on the SUBREG.  */
878	  if (optimize > 0 && GET_CODE (from) == SUBREG)
879	    from = force_reg (from_mode, from);
880	  emit_unop_insn (code, to, from, equiv_code);
881	  return;
882	}
883      /* Next, try converting via full word.  */
884      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
885	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
886		   != CODE_FOR_nothing))
887	{
888	  if (GET_CODE (to) == REG)
889	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
890	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
891	  emit_unop_insn (code, to,
892			  gen_lowpart (word_mode, to), equiv_code);
893	  return;
894	}
895
896      /* No special multiword conversion insn; do it by hand.  */
897      start_sequence ();
898
899      /* Since we will turn this into a no conflict block, we must ensure
900	 that the source does not overlap the target.  */
901
902      if (reg_overlap_mentioned_p (to, from))
903	from = force_reg (from_mode, from);
904
905      /* Get a copy of FROM widened to a word, if necessary.  */
906      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
907	lowpart_mode = word_mode;
908      else
909	lowpart_mode = from_mode;
910
911      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
912
913      lowpart = gen_lowpart (lowpart_mode, to);
914      emit_move_insn (lowpart, lowfrom);
915
916      /* Compute the value to put in each remaining word.  */
917      if (unsignedp)
918	fill_value = const0_rtx;
919      else
920	{
921#ifdef HAVE_slt
922	  if (HAVE_slt
923	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
924	      && STORE_FLAG_VALUE == -1)
925	    {
926	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
927			     lowpart_mode, 0);
928	      fill_value = gen_reg_rtx (word_mode);
929	      emit_insn (gen_slt (fill_value));
930	    }
931	  else
932#endif
933	    {
934	      fill_value
935		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
936				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
937				NULL_RTX, 0);
938	      fill_value = convert_to_mode (word_mode, fill_value, 1);
939	    }
940	}
941
942      /* Fill the remaining words.  */
943      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
944	{
945	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
946	  rtx subword = operand_subword (to, index, 1, to_mode);
947
948	  if (subword == 0)
949	    abort ();
950
951	  if (fill_value != subword)
952	    emit_move_insn (subword, fill_value);
953	}
954
955      insns = get_insns ();
956      end_sequence ();
957
958      emit_no_conflict_block (insns, to, from, NULL_RTX,
959			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
960      return;
961    }
962
963  /* Truncating multi-word to a word or less.  */
964  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
965      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
966    {
967      if (!((GET_CODE (from) == MEM
968	     && ! MEM_VOLATILE_P (from)
969	     && direct_load[(int) to_mode]
970	     && ! mode_dependent_address_p (XEXP (from, 0)))
971	    || GET_CODE (from) == REG
972	    || GET_CODE (from) == SUBREG))
973	from = force_reg (from_mode, from);
974      convert_move (to, gen_lowpart (word_mode, from), 0);
975      return;
976    }
977
978  /* Handle pointer conversion.  */			/* SPEE 900220.  */
979  if (to_mode == PQImode)
980    {
981      if (from_mode != QImode)
982	from = convert_to_mode (QImode, from, unsignedp);
983
984#ifdef HAVE_truncqipqi2
985      if (HAVE_truncqipqi2)
986	{
987	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
988	  return;
989	}
990#endif /* HAVE_truncqipqi2 */
991      abort ();
992    }
993
994  if (from_mode == PQImode)
995    {
996      if (to_mode != QImode)
997	{
998	  from = convert_to_mode (QImode, from, unsignedp);
999	  from_mode = QImode;
1000	}
1001      else
1002	{
1003#ifdef HAVE_extendpqiqi2
1004	  if (HAVE_extendpqiqi2)
1005	    {
1006	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1007	      return;
1008	    }
1009#endif /* HAVE_extendpqiqi2 */
1010	  abort ();
1011	}
1012    }
1013
1014  if (to_mode == PSImode)
1015    {
1016      if (from_mode != SImode)
1017	from = convert_to_mode (SImode, from, unsignedp);
1018
1019#ifdef HAVE_truncsipsi2
1020      if (HAVE_truncsipsi2)
1021	{
1022	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1023	  return;
1024	}
1025#endif /* HAVE_truncsipsi2 */
1026      abort ();
1027    }
1028
1029  if (from_mode == PSImode)
1030    {
1031      if (to_mode != SImode)
1032	{
1033	  from = convert_to_mode (SImode, from, unsignedp);
1034	  from_mode = SImode;
1035	}
1036      else
1037	{
1038#ifdef HAVE_extendpsisi2
1039	  if (! unsignedp && HAVE_extendpsisi2)
1040	    {
1041	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1042	      return;
1043	    }
1044#endif /* HAVE_extendpsisi2 */
1045#ifdef HAVE_zero_extendpsisi2
1046	  if (unsignedp && HAVE_zero_extendpsisi2)
1047	    {
1048	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1049	      return;
1050	    }
1051#endif /* HAVE_zero_extendpsisi2 */
1052	  abort ();
1053	}
1054    }
1055
1056  if (to_mode == PDImode)
1057    {
1058      if (from_mode != DImode)
1059	from = convert_to_mode (DImode, from, unsignedp);
1060
1061#ifdef HAVE_truncdipdi2
1062      if (HAVE_truncdipdi2)
1063	{
1064	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1065	  return;
1066	}
1067#endif /* HAVE_truncdipdi2 */
1068      abort ();
1069    }
1070
1071  if (from_mode == PDImode)
1072    {
1073      if (to_mode != DImode)
1074	{
1075	  from = convert_to_mode (DImode, from, unsignedp);
1076	  from_mode = DImode;
1077	}
1078      else
1079	{
1080#ifdef HAVE_extendpdidi2
1081	  if (HAVE_extendpdidi2)
1082	    {
1083	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1084	      return;
1085	    }
1086#endif /* HAVE_extendpdidi2 */
1087	  abort ();
1088	}
1089    }
1090
1091  /* Now follow all the conversions between integers
1092     no more than a word long.  */
1093
1094  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1095  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1096      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1097				GET_MODE_BITSIZE (from_mode)))
1098    {
1099      if (!((GET_CODE (from) == MEM
1100	     && ! MEM_VOLATILE_P (from)
1101	     && direct_load[(int) to_mode]
1102	     && ! mode_dependent_address_p (XEXP (from, 0)))
1103	    || GET_CODE (from) == REG
1104	    || GET_CODE (from) == SUBREG))
1105	from = force_reg (from_mode, from);
1106      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1107	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1108	from = copy_to_reg (from);
1109      emit_move_insn (to, gen_lowpart (to_mode, from));
1110      return;
1111    }
1112
1113  /* Handle extension.  */
1114  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1115    {
1116      /* Convert directly if that works.  */
1117      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1118	  != CODE_FOR_nothing)
1119	{
1120	  if (flag_force_mem)
1121	    from = force_not_mem (from);
1122
1123	  emit_unop_insn (code, to, from, equiv_code);
1124	  return;
1125	}
1126      else
1127	{
1128	  enum machine_mode intermediate;
1129	  rtx tmp;
1130	  tree shift_amount;
1131
1132	  /* Search for a mode to convert via.  */
1133	  for (intermediate = from_mode; intermediate != VOIDmode;
1134	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1135	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1136		  != CODE_FOR_nothing)
1137		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1138		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1139					       GET_MODE_BITSIZE (intermediate))))
1140		&& (can_extend_p (intermediate, from_mode, unsignedp)
1141		    != CODE_FOR_nothing))
1142	      {
1143		convert_move (to, convert_to_mode (intermediate, from,
1144						   unsignedp), unsignedp);
1145		return;
1146	      }
1147
1148	  /* No suitable intermediate mode.
1149	     Generate what we need with	shifts.  */
1150	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1151				      - GET_MODE_BITSIZE (from_mode), 0);
1152	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1153	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1154			      to, unsignedp);
1155	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1156			      to, unsignedp);
1157	  if (tmp != to)
1158	    emit_move_insn (to, tmp);
1159	  return;
1160	}
1161    }
1162
1163  /* Support special truncate insns for certain modes.  */
1164
1165  if (from_mode == DImode && to_mode == SImode)
1166    {
1167#ifdef HAVE_truncdisi2
1168      if (HAVE_truncdisi2)
1169	{
1170	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1171	  return;
1172	}
1173#endif
1174      convert_move (to, force_reg (from_mode, from), unsignedp);
1175      return;
1176    }
1177
1178  if (from_mode == DImode && to_mode == HImode)
1179    {
1180#ifdef HAVE_truncdihi2
1181      if (HAVE_truncdihi2)
1182	{
1183	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1184	  return;
1185	}
1186#endif
1187      convert_move (to, force_reg (from_mode, from), unsignedp);
1188      return;
1189    }
1190
1191  if (from_mode == DImode && to_mode == QImode)
1192    {
1193#ifdef HAVE_truncdiqi2
1194      if (HAVE_truncdiqi2)
1195	{
1196	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1197	  return;
1198	}
1199#endif
1200      convert_move (to, force_reg (from_mode, from), unsignedp);
1201      return;
1202    }
1203
1204  if (from_mode == SImode && to_mode == HImode)
1205    {
1206#ifdef HAVE_truncsihi2
1207      if (HAVE_truncsihi2)
1208	{
1209	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1210	  return;
1211	}
1212#endif
1213      convert_move (to, force_reg (from_mode, from), unsignedp);
1214      return;
1215    }
1216
1217  if (from_mode == SImode && to_mode == QImode)
1218    {
1219#ifdef HAVE_truncsiqi2
1220      if (HAVE_truncsiqi2)
1221	{
1222	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1223	  return;
1224	}
1225#endif
1226      convert_move (to, force_reg (from_mode, from), unsignedp);
1227      return;
1228    }
1229
1230  if (from_mode == HImode && to_mode == QImode)
1231    {
1232#ifdef HAVE_trunchiqi2
1233      if (HAVE_trunchiqi2)
1234	{
1235	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1236	  return;
1237	}
1238#endif
1239      convert_move (to, force_reg (from_mode, from), unsignedp);
1240      return;
1241    }
1242
1243  if (from_mode == TImode && to_mode == DImode)
1244    {
1245#ifdef HAVE_trunctidi2
1246      if (HAVE_trunctidi2)
1247	{
1248	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1249	  return;
1250	}
1251#endif
1252      convert_move (to, force_reg (from_mode, from), unsignedp);
1253      return;
1254    }
1255
1256  if (from_mode == TImode && to_mode == SImode)
1257    {
1258#ifdef HAVE_trunctisi2
1259      if (HAVE_trunctisi2)
1260	{
1261	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1262	  return;
1263	}
1264#endif
1265      convert_move (to, force_reg (from_mode, from), unsignedp);
1266      return;
1267    }
1268
1269  if (from_mode == TImode && to_mode == HImode)
1270    {
1271#ifdef HAVE_trunctihi2
1272      if (HAVE_trunctihi2)
1273	{
1274	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1275	  return;
1276	}
1277#endif
1278      convert_move (to, force_reg (from_mode, from), unsignedp);
1279      return;
1280    }
1281
1282  if (from_mode == TImode && to_mode == QImode)
1283    {
1284#ifdef HAVE_trunctiqi2
1285      if (HAVE_trunctiqi2)
1286	{
1287	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1288	  return;
1289	}
1290#endif
1291      convert_move (to, force_reg (from_mode, from), unsignedp);
1292      return;
1293    }
1294
1295  /* Handle truncation of volatile memrefs, and so on;
1296     the things that couldn't be truncated directly,
1297     and for which there was no special instruction.  */
1298  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1299    {
1300      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1301      emit_move_insn (to, temp);
1302      return;
1303    }
1304
1305  /* Mode combination is not recognized.  */
1306  abort ();
1307}
1308
1309/* Return an rtx for a value that would result
1310   from converting X to mode MODE.
1311   Both X and MODE may be floating, or both integer.
1312   UNSIGNEDP is nonzero if X is an unsigned value.
1313   This can be done by referring to a part of X in place
1314   or by copying to a new temporary with conversion.
1315
1316   This function *must not* call protect_from_queue
1317   except when putting X into an insn (in which case convert_move does it).  */
1318
1319rtx
1320convert_to_mode (mode, x, unsignedp)
1321     enum machine_mode mode;
1322     rtx x;
1323     int unsignedp;
1324{
1325  return convert_modes (mode, VOIDmode, x, unsignedp);
1326}
1327
1328/* Return an rtx for a value that would result
1329   from converting X from mode OLDMODE to mode MODE.
1330   Both modes may be floating, or both integer.
1331   UNSIGNEDP is nonzero if X is an unsigned value.
1332
1333   This can be done by referring to a part of X in place
1334   or by copying to a new temporary with conversion.
1335
1336   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1337
1338   This function *must not* call protect_from_queue
1339   except when putting X into an insn (in which case convert_move does it).  */
1340
1341rtx
1342convert_modes (mode, oldmode, x, unsignedp)
1343     enum machine_mode mode, oldmode;
1344     rtx x;
1345     int unsignedp;
1346{
1347  rtx temp;
1348
1349  /* If FROM is a SUBREG that indicates that we have already done at least
1350     the required extension, strip it.  */
1351
1352  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1353      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1354      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1355    x = gen_lowpart (mode, x);
1356
1357  if (GET_MODE (x) != VOIDmode)
1358    oldmode = GET_MODE (x);
1359
1360  if (mode == oldmode)
1361    return x;
1362
1363  /* There is one case that we must handle specially: If we are converting
1364     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1365     we are to interpret the constant as unsigned, gen_lowpart will do
1366     the wrong if the constant appears negative.  What we want to do is
1367     make the high-order word of the constant zero, not all ones.  */
1368
1369  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1370      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1371      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1372    {
1373      HOST_WIDE_INT val = INTVAL (x);
1374
1375      if (oldmode != VOIDmode
1376	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1377	{
1378	  int width = GET_MODE_BITSIZE (oldmode);
1379
1380	  /* We need to zero extend VAL.  */
1381	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1382	}
1383
1384      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1385    }
1386
1387  /* We can do this with a gen_lowpart if both desired and current modes
1388     are integer, and this is either a constant integer, a register, or a
1389     non-volatile MEM.  Except for the constant case where MODE is no
1390     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1391
1392  if ((GET_CODE (x) == CONST_INT
1393       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1394      || (GET_MODE_CLASS (mode) == MODE_INT
1395	  && GET_MODE_CLASS (oldmode) == MODE_INT
1396	  && (GET_CODE (x) == CONST_DOUBLE
1397	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1398		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1399		       && direct_load[(int) mode])
1400		      || (GET_CODE (x) == REG
1401			  && (! HARD_REGISTER_P (x)
1402			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
1403			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1404						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1405    {
1406      /* ?? If we don't know OLDMODE, we have to assume here that
1407	 X does not need sign- or zero-extension.   This may not be
1408	 the case, but it's the best we can do.  */
1409      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1410	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1411	{
1412	  HOST_WIDE_INT val = INTVAL (x);
1413	  int width = GET_MODE_BITSIZE (oldmode);
1414
1415	  /* We must sign or zero-extend in this case.  Start by
1416	     zero-extending, then sign extend if we need to.  */
1417	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1418	  if (! unsignedp
1419	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1420	    val |= (HOST_WIDE_INT) (-1) << width;
1421
1422	  return gen_int_mode (val, mode);
1423	}
1424
1425      return gen_lowpart (mode, x);
1426    }
1427
1428  temp = gen_reg_rtx (mode);
1429  convert_move (temp, x, unsignedp);
1430  return temp;
1431}
1432
1433/* This macro is used to determine what the largest unit size that
1434   move_by_pieces can use is.  */
1435
1436/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1437   move efficiently, as opposed to  MOVE_MAX which is the maximum
1438   number of bytes we can move with a single instruction.  */
1439
1440#ifndef MOVE_MAX_PIECES
1441#define MOVE_MAX_PIECES   MOVE_MAX
1442#endif
1443
1444/* STORE_MAX_PIECES is the number of bytes at a time that we can
1445   store efficiently.  Due to internal GCC limitations, this is
1446   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1447   for an immediate constant.  */
1448
1449#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1450
1451/* Generate several move instructions to copy LEN bytes from block FROM to
1452   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1453   and TO through protect_from_queue before calling.
1454
1455   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1456   used to push FROM to the stack.
1457
1458   ALIGN is maximum alignment we can assume.  */
1459
1460void
1461move_by_pieces (to, from, len, align)
1462     rtx to, from;
1463     unsigned HOST_WIDE_INT len;
1464     unsigned int align;
1465{
1466  struct move_by_pieces data;
1467  rtx to_addr, from_addr = XEXP (from, 0);
1468  unsigned int max_size = MOVE_MAX_PIECES + 1;
1469  enum machine_mode mode = VOIDmode, tmode;
1470  enum insn_code icode;
1471
1472  data.offset = 0;
1473  data.from_addr = from_addr;
1474  if (to)
1475    {
1476      to_addr = XEXP (to, 0);
1477      data.to = to;
1478      data.autinc_to
1479	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1480	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1481      data.reverse
1482	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1483    }
1484  else
1485    {
1486      to_addr = NULL_RTX;
1487      data.to = NULL_RTX;
1488      data.autinc_to = 1;
1489#ifdef STACK_GROWS_DOWNWARD
1490      data.reverse = 1;
1491#else
1492      data.reverse = 0;
1493#endif
1494    }
1495  data.to_addr = to_addr;
1496  data.from = from;
1497  data.autinc_from
1498    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1499       || GET_CODE (from_addr) == POST_INC
1500       || GET_CODE (from_addr) == POST_DEC);
1501
1502  data.explicit_inc_from = 0;
1503  data.explicit_inc_to = 0;
1504  if (data.reverse) data.offset = len;
1505  data.len = len;
1506
1507  /* If copying requires more than two move insns,
1508     copy addresses to registers (to make displacements shorter)
1509     and use post-increment if available.  */
1510  if (!(data.autinc_from && data.autinc_to)
1511      && move_by_pieces_ninsns (len, align) > 2)
1512    {
1513      /* Find the mode of the largest move...  */
1514      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1515	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1516	if (GET_MODE_SIZE (tmode) < max_size)
1517	  mode = tmode;
1518
1519      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1520	{
1521	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1522	  data.autinc_from = 1;
1523	  data.explicit_inc_from = -1;
1524	}
1525      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1526	{
1527	  data.from_addr = copy_addr_to_reg (from_addr);
1528	  data.autinc_from = 1;
1529	  data.explicit_inc_from = 1;
1530	}
1531      if (!data.autinc_from && CONSTANT_P (from_addr))
1532	data.from_addr = copy_addr_to_reg (from_addr);
1533      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1534	{
1535	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1536	  data.autinc_to = 1;
1537	  data.explicit_inc_to = -1;
1538	}
1539      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1540	{
1541	  data.to_addr = copy_addr_to_reg (to_addr);
1542	  data.autinc_to = 1;
1543	  data.explicit_inc_to = 1;
1544	}
1545      if (!data.autinc_to && CONSTANT_P (to_addr))
1546	data.to_addr = copy_addr_to_reg (to_addr);
1547    }
1548
1549  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1550      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1551    align = MOVE_MAX * BITS_PER_UNIT;
1552
1553  /* First move what we can in the largest integer mode, then go to
1554     successively smaller modes.  */
1555
1556  while (max_size > 1)
1557    {
1558      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1559	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1560	if (GET_MODE_SIZE (tmode) < max_size)
1561	  mode = tmode;
1562
1563      if (mode == VOIDmode)
1564	break;
1565
1566      icode = mov_optab->handlers[(int) mode].insn_code;
1567      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1568	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1569
1570      max_size = GET_MODE_SIZE (mode);
1571    }
1572
1573  /* The code above should have handled everything.  */
1574  if (data.len > 0)
1575    abort ();
1576}
1577
1578/* Return number of insns required to move L bytes by pieces.
1579   ALIGN (in bits) is maximum alignment we can assume.  */
1580
1581static unsigned HOST_WIDE_INT
1582move_by_pieces_ninsns (l, align)
1583     unsigned HOST_WIDE_INT l;
1584     unsigned int align;
1585{
1586  unsigned HOST_WIDE_INT n_insns = 0;
1587  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1588
1589  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1590      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1591    align = MOVE_MAX * BITS_PER_UNIT;
1592
1593  while (max_size > 1)
1594    {
1595      enum machine_mode mode = VOIDmode, tmode;
1596      enum insn_code icode;
1597
1598      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1599	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1600	if (GET_MODE_SIZE (tmode) < max_size)
1601	  mode = tmode;
1602
1603      if (mode == VOIDmode)
1604	break;
1605
1606      icode = mov_optab->handlers[(int) mode].insn_code;
1607      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1608	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1609
1610      max_size = GET_MODE_SIZE (mode);
1611    }
1612
1613  if (l)
1614    abort ();
1615  return n_insns;
1616}
1617
1618/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1619   with move instructions for mode MODE.  GENFUN is the gen_... function
1620   to make a move insn for that mode.  DATA has all the other info.  */
1621
1622static void
1623move_by_pieces_1 (genfun, mode, data)
1624     rtx (*genfun) PARAMS ((rtx, ...));
1625     enum machine_mode mode;
1626     struct move_by_pieces *data;
1627{
1628  unsigned int size = GET_MODE_SIZE (mode);
1629  rtx to1 = NULL_RTX, from1;
1630
1631  while (data->len >= size)
1632    {
1633      if (data->reverse)
1634	data->offset -= size;
1635
1636      if (data->to)
1637	{
1638	  if (data->autinc_to)
1639	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1640					     data->offset);
1641	  else
1642	    to1 = adjust_address (data->to, mode, data->offset);
1643	}
1644
1645      if (data->autinc_from)
1646	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1647					   data->offset);
1648      else
1649	from1 = adjust_address (data->from, mode, data->offset);
1650
1651      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1652	emit_insn (gen_add2_insn (data->to_addr,
1653				  GEN_INT (-(HOST_WIDE_INT)size)));
1654      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1655	emit_insn (gen_add2_insn (data->from_addr,
1656				  GEN_INT (-(HOST_WIDE_INT)size)));
1657
1658      if (data->to)
1659	emit_insn ((*genfun) (to1, from1));
1660      else
1661	{
1662#ifdef PUSH_ROUNDING
1663	  emit_single_push_insn (mode, from1, NULL);
1664#else
1665	  abort ();
1666#endif
1667	}
1668
1669      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1670	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1671      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1672	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1673
1674      if (! data->reverse)
1675	data->offset += size;
1676
1677      data->len -= size;
1678    }
1679}
1680
1681/* Emit code to move a block Y to a block X.  This may be done with
1682   string-move instructions, with multiple scalar move instructions,
1683   or with a library call.
1684
1685   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1686   SIZE is an rtx that says how long they are.
1687   ALIGN is the maximum alignment we can assume they have.
1688   METHOD describes what kind of copy this is, and what mechanisms may be used.
1689
1690   Return the address of the new block, if memcpy is called and returns it,
1691   0 otherwise.  */
1692
1693rtx
1694emit_block_move (x, y, size, method)
1695     rtx x, y, size;
1696     enum block_op_methods method;
1697{
1698  bool may_use_call;
1699  rtx retval = 0;
1700  unsigned int align;
1701
1702  switch (method)
1703    {
1704    case BLOCK_OP_NORMAL:
1705      may_use_call = true;
1706      break;
1707
1708    case BLOCK_OP_CALL_PARM:
1709      may_use_call = block_move_libcall_safe_for_call_parm ();
1710
1711      /* Make inhibit_defer_pop nonzero around the library call
1712	 to force it to pop the arguments right away.  */
1713      NO_DEFER_POP;
1714      break;
1715
1716    case BLOCK_OP_NO_LIBCALL:
1717      may_use_call = false;
1718      break;
1719
1720    default:
1721      abort ();
1722    }
1723
1724  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1725
1726  if (GET_MODE (x) != BLKmode)
1727    abort ();
1728  if (GET_MODE (y) != BLKmode)
1729    abort ();
1730
1731  x = protect_from_queue (x, 1);
1732  y = protect_from_queue (y, 0);
1733  size = protect_from_queue (size, 0);
1734
1735  if (GET_CODE (x) != MEM)
1736    abort ();
1737  if (GET_CODE (y) != MEM)
1738    abort ();
1739  if (size == 0)
1740    abort ();
1741
1742  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1743     can be incorrect is coming from __builtin_memcpy.  */
1744  if (GET_CODE (size) == CONST_INT)
1745    {
1746      x = shallow_copy_rtx (x);
1747      y = shallow_copy_rtx (y);
1748      set_mem_size (x, size);
1749      set_mem_size (y, size);
1750    }
1751
1752  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1753    move_by_pieces (x, y, INTVAL (size), align);
1754  else if (emit_block_move_via_movstr (x, y, size, align))
1755    ;
1756  else if (may_use_call)
1757    retval = emit_block_move_via_libcall (x, y, size);
1758  else
1759    emit_block_move_via_loop (x, y, size, align);
1760
1761  if (method == BLOCK_OP_CALL_PARM)
1762    OK_DEFER_POP;
1763
1764  return retval;
1765}
1766
1767/* A subroutine of emit_block_move.  Returns true if calling the
1768   block move libcall will not clobber any parameters which may have
1769   already been placed on the stack.  */
1770
1771static bool
1772block_move_libcall_safe_for_call_parm ()
1773{
1774  if (PUSH_ARGS)
1775    return true;
1776  else
1777    {
1778      /* Check to see whether memcpy takes all register arguments.  */
1779      static enum {
1780	takes_regs_uninit, takes_regs_no, takes_regs_yes
1781      } takes_regs = takes_regs_uninit;
1782
1783      switch (takes_regs)
1784	{
1785	case takes_regs_uninit:
1786	  {
1787	    CUMULATIVE_ARGS args_so_far;
1788	    tree fn, arg;
1789
1790	    fn = emit_block_move_libcall_fn (false);
1791	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1792
1793	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1794	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1795	      {
1796		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1797		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1798		if (!tmp || !REG_P (tmp))
1799		  goto fail_takes_regs;
1800#ifdef FUNCTION_ARG_PARTIAL_NREGS
1801		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1802						NULL_TREE, 1))
1803		  goto fail_takes_regs;
1804#endif
1805		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1806	      }
1807	  }
1808	  takes_regs = takes_regs_yes;
1809	  /* FALLTHRU */
1810
1811	case takes_regs_yes:
1812	  return true;
1813
1814	fail_takes_regs:
1815	  takes_regs = takes_regs_no;
1816	  /* FALLTHRU */
1817	case takes_regs_no:
1818	  return false;
1819
1820	default:
1821	  abort ();
1822	}
1823    }
1824}
1825
1826/* A subroutine of emit_block_move.  Expand a movstr pattern;
1827   return true if successful.  */
1828
1829static bool
1830emit_block_move_via_movstr (x, y, size, align)
1831     rtx x, y, size;
1832     unsigned int align;
1833{
1834  /* Try the most limited insn first, because there's no point
1835     including more than one in the machine description unless
1836     the more limited one has some advantage.  */
1837
1838  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1839  enum machine_mode mode;
1840
1841  /* Since this is a move insn, we don't care about volatility.  */
1842  volatile_ok = 1;
1843
1844  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1845       mode = GET_MODE_WIDER_MODE (mode))
1846    {
1847      enum insn_code code = movstr_optab[(int) mode];
1848      insn_operand_predicate_fn pred;
1849
1850      if (code != CODE_FOR_nothing
1851	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1852	     here because if SIZE is less than the mode mask, as it is
1853	     returned by the macro, it will definitely be less than the
1854	     actual mode mask.  */
1855	  && ((GET_CODE (size) == CONST_INT
1856	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1857		   <= (GET_MODE_MASK (mode) >> 1)))
1858	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1859	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1860	      || (*pred) (x, BLKmode))
1861	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1862	      || (*pred) (y, BLKmode))
1863	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1864	      || (*pred) (opalign, VOIDmode)))
1865	{
1866	  rtx op2;
1867	  rtx last = get_last_insn ();
1868	  rtx pat;
1869
1870	  op2 = convert_to_mode (mode, size, 1);
1871	  pred = insn_data[(int) code].operand[2].predicate;
1872	  if (pred != 0 && ! (*pred) (op2, mode))
1873	    op2 = copy_to_mode_reg (mode, op2);
1874
1875	  /* ??? When called for BLOCK_OP_CALL_PARM, it'd be
1876	     nice if there were some way to inform the backend, so
1877	     that it doesn't fail the expansion because it thinks
1878	     emitting the libcall would be more efficient.  */
1879
1880	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1881	  if (pat)
1882	    {
1883	      emit_insn (pat);
1884	      volatile_ok = 0;
1885	      return true;
1886	    }
1887	  else
1888	    delete_insns_since (last);
1889	}
1890    }
1891
1892  volatile_ok = 0;
1893  return false;
1894}
1895
1896/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
1897   Return the return value from memcpy, 0 otherwise.  */
1898
1899static rtx
1900emit_block_move_via_libcall (dst, src, size)
1901     rtx dst, src, size;
1902{
1903  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1904  enum machine_mode size_mode;
1905  rtx retval;
1906
1907  /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1908
1909     It is unsafe to save the value generated by protect_from_queue
1910     and reuse it later.  Consider what happens if emit_queue is
1911     called before the return value from protect_from_queue is used.
1912
1913     Expansion of the CALL_EXPR below will call emit_queue before
1914     we are finished emitting RTL for argument setup.  So if we are
1915     not careful we could get the wrong value for an argument.
1916
1917     To avoid this problem we go ahead and emit code to copy X, Y &
1918     SIZE into new pseudos.  We can then place those new pseudos
1919     into an RTL_EXPR and use them later, even after a call to
1920     emit_queue.
1921
1922     Note this is not strictly needed for library calls since they
1923     do not call emit_queue before loading their arguments.  However,
1924     we may need to have library calls call emit_queue in the future
1925     since failing to do so could cause problems for targets which
1926     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1927
1928  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1929  src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1930
1931  if (TARGET_MEM_FUNCTIONS)
1932    size_mode = TYPE_MODE (sizetype);
1933  else
1934    size_mode = TYPE_MODE (unsigned_type_node);
1935  size = convert_to_mode (size_mode, size, 1);
1936  size = copy_to_mode_reg (size_mode, size);
1937
1938  /* It is incorrect to use the libcall calling conventions to call
1939     memcpy in this context.  This could be a user call to memcpy and
1940     the user may wish to examine the return value from memcpy.  For
1941     targets where libcalls and normal calls have different conventions
1942     for returning pointers, we could end up generating incorrect code.
1943
1944     For convenience, we generate the call to bcopy this way as well.  */
1945
1946  dst_tree = make_tree (ptr_type_node, dst);
1947  src_tree = make_tree (ptr_type_node, src);
1948  if (TARGET_MEM_FUNCTIONS)
1949    size_tree = make_tree (sizetype, size);
1950  else
1951    size_tree = make_tree (unsigned_type_node, size);
1952
1953  fn = emit_block_move_libcall_fn (true);
1954  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1955  if (TARGET_MEM_FUNCTIONS)
1956    {
1957      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1958      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1959    }
1960  else
1961    {
1962      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1963      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1964    }
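
  /* In source terms, the call built below is memcpy (dst, src, size)
     when TARGET_MEM_FUNCTIONS, and bcopy (src, dst, size) otherwise
     (note the swapped pointer arguments).  */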
1965
1966  /* Now we have to build up the CALL_EXPR itself.  */
1967  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1968  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1969		     call_expr, arg_list, NULL_TREE);
1970  TREE_SIDE_EFFECTS (call_expr) = 1;
1971
1972  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1973
1974  /* If we are initializing a readonly value, show the above call
1975     clobbered it.  Otherwise, a load from it may erroneously be
1976     hoisted from a loop.  */
1977  if (RTX_UNCHANGING_P (dst))
1978    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1979
1980  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1981}
1982
1983/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1984   for the function we use for block copies.  The first time FOR_CALL
1985   is true, we call assemble_external.  */
1986
1987static GTY(()) tree block_move_fn;
1988
1989static tree
1990emit_block_move_libcall_fn (for_call)
1991      int for_call;
1992{
1993  static bool emitted_extern;
1994  tree fn = block_move_fn, args;
1995
1996  if (!fn)
1997    {
1998      if (TARGET_MEM_FUNCTIONS)
1999	{
2000	  fn = get_identifier ("memcpy");
2001	  args = build_function_type_list (ptr_type_node, ptr_type_node,
2002					   const_ptr_type_node, sizetype,
2003					   NULL_TREE);
2004	}
2005      else
2006	{
2007	  fn = get_identifier ("bcopy");
2008	  args = build_function_type_list (void_type_node, const_ptr_type_node,
2009					   ptr_type_node, unsigned_type_node,
2010					   NULL_TREE);
2011	}
2012
2013      fn = build_decl (FUNCTION_DECL, fn, args);
2014      DECL_EXTERNAL (fn) = 1;
2015      TREE_PUBLIC (fn) = 1;
2016      DECL_ARTIFICIAL (fn) = 1;
2017      TREE_NOTHROW (fn) = 1;
2018
2019      block_move_fn = fn;
2020    }
2021
2022  if (for_call && !emitted_extern)
2023    {
2024      emitted_extern = true;
2025      make_decl_rtl (fn, NULL);
2026      assemble_external (fn);
2027    }
2028
2029  return fn;
2030}
2031
2032/* A subroutine of emit_block_move.  Copy the data via an explicit
2033   loop.  This is used only when libcalls are forbidden.  */
2034/* ??? It'd be nice to copy in hunks larger than QImode.  */
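
/* As a rough illustration (ignoring modes and address arithmetic), the
   insns emitted below amount to:

       iter = 0;
       goto cmp;
     top:
       ((char *) x)[iter] = ((char *) y)[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;
*/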
2035
2036static void
2037emit_block_move_via_loop (x, y, size, align)
2038     rtx x, y, size;
2039     unsigned int align ATTRIBUTE_UNUSED;
2040{
2041  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2042  enum machine_mode iter_mode;
2043
2044  iter_mode = GET_MODE (size);
2045  if (iter_mode == VOIDmode)
2046    iter_mode = word_mode;
2047
2048  top_label = gen_label_rtx ();
2049  cmp_label = gen_label_rtx ();
2050  iter = gen_reg_rtx (iter_mode);
2051
2052  emit_move_insn (iter, const0_rtx);
2053
2054  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2055  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2056  do_pending_stack_adjust ();
2057
2058  emit_note (NULL, NOTE_INSN_LOOP_BEG);
2059
2060  emit_jump (cmp_label);
2061  emit_label (top_label);
2062
2063  tmp = convert_modes (Pmode, iter_mode, iter, true);
2064  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2065  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2066  x = change_address (x, QImode, x_addr);
2067  y = change_address (y, QImode, y_addr);
2068
2069  emit_move_insn (x, y);
2070
2071  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2072			     true, OPTAB_LIB_WIDEN);
2073  if (tmp != iter)
2074    emit_move_insn (iter, tmp);
2075
2076  emit_note (NULL, NOTE_INSN_LOOP_CONT);
2077  emit_label (cmp_label);
2078
2079  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2080			   true, top_label);
2081
2082  emit_note (NULL, NOTE_INSN_LOOP_END);
2083}
2084
2085/* Copy all or part of a value X into registers starting at REGNO.
2086   The number of registers to be filled is NREGS.  */
2087
2088void
2089move_block_to_reg (regno, x, nregs, mode)
2090     int regno;
2091     rtx x;
2092     int nregs;
2093     enum machine_mode mode;
2094{
2095  int i;
2096#ifdef HAVE_load_multiple
2097  rtx pat;
2098  rtx last;
2099#endif
2100
2101  if (nregs == 0)
2102    return;
2103
2104  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2105    x = validize_mem (force_const_mem (mode, x));
2106
2107  /* See if the machine can do this with a load multiple insn.  */
2108#ifdef HAVE_load_multiple
2109  if (HAVE_load_multiple)
2110    {
2111      last = get_last_insn ();
2112      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2113			       GEN_INT (nregs));
2114      if (pat)
2115	{
2116	  emit_insn (pat);
2117	  return;
2118	}
2119      else
2120	delete_insns_since (last);
2121    }
2122#endif
2123
2124  for (i = 0; i < nregs; i++)
2125    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2126		    operand_subword_force (x, i, mode));
2127}
2128
2129/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2130   The number of registers to be filled is NREGS.  SIZE indicates the number
2131   of bytes in the object X.  */
2132
2133void
2134move_block_from_reg (regno, x, nregs, size)
2135     int regno;
2136     rtx x;
2137     int nregs;
2138     int size;
2139{
2140  int i;
2141#ifdef HAVE_store_multiple
2142  rtx pat;
2143  rtx last;
2144#endif
2145  enum machine_mode mode;
2146
2147  if (nregs == 0)
2148    return;
2149
2150  /* If SIZE is that of a mode no bigger than a word, just use that
2151     mode's store operation.  */
2152  if (size <= UNITS_PER_WORD
2153      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2154    {
2155      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2156      return;
2157    }
2158
2159  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2160     to the left before storing to memory.  Note that the previous test
2161     doesn't handle all cases (e.g. SIZE == 3).  */
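  /* E.g. (illustrative), with 4-byte words and SIZE == 3 the value is
     shifted left by (4 - 3) * 8 = 8 bits, so that its three significant
     bytes end up in the high-order part of the word, which corresponds
     to the lowest memory addresses on a big-endian machine.  */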
2162  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2163    {
2164      rtx tem = operand_subword (x, 0, 1, BLKmode);
2165      rtx shift;
2166
2167      if (tem == 0)
2168	abort ();
2169
2170      shift = expand_shift (LSHIFT_EXPR, word_mode,
2171			    gen_rtx_REG (word_mode, regno),
2172			    build_int_2 ((UNITS_PER_WORD - size)
2173					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2174      emit_move_insn (tem, shift);
2175      return;
2176    }
2177
2178  /* See if the machine can do this with a store multiple insn.  */
2179#ifdef HAVE_store_multiple
2180  if (HAVE_store_multiple)
2181    {
2182      last = get_last_insn ();
2183      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2184				GEN_INT (nregs));
2185      if (pat)
2186	{
2187	  emit_insn (pat);
2188	  return;
2189	}
2190      else
2191	delete_insns_since (last);
2192    }
2193#endif
2194
2195  for (i = 0; i < nregs; i++)
2196    {
2197      rtx tem = operand_subword (x, i, 1, BLKmode);
2198
2199      if (tem == 0)
2200	abort ();
2201
2202      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2203    }
2204}
2205
2206/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2207   ORIG, where ORIG is a non-consecutive group of registers represented by
2208   a PARALLEL.  The clone is identical to the original except in that the
2209   original set of registers is replaced by a new set of pseudo registers.
2210   The new set has the same modes as the original set.  */
2211
2212rtx
2213gen_group_rtx (orig)
2214     rtx orig;
2215{
2216  int i, length;
2217  rtx *tmps;
2218
2219  if (GET_CODE (orig) != PARALLEL)
2220    abort ();
2221
2222  length = XVECLEN (orig, 0);
2223  tmps = (rtx *) alloca (sizeof (rtx) * length);
2224
2225  /* Skip a NULL entry in the first slot.  */
2226  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2227
2228  if (i)
2229    tmps[0] = 0;
2230
2231  for (; i < length; i++)
2232    {
2233      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2234      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2235
2236      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2237    }
2238
2239  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2240}
2241
2242/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2243   registers represented by a PARALLEL.  SSIZE represents the total size of
2244   block SRC in bytes, or -1 if not known.  */
2245/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2246   the balance will be in what would be the low-order memory addresses, i.e.
2247   left justified for big endian, right justified for little endian.  This
2248   happens to be true for the targets currently using this support.  If this
2249   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2250   would be needed.  */
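
/* For illustration, a DST of the form

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   says that bytes 0..7 of the block are to be loaded into (reg:DI 3)
   and bytes 8..15 into (reg:DI 4).  */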
2251
2252void
2253emit_group_load (dst, orig_src, ssize)
2254     rtx dst, orig_src;
2255     int ssize;
2256{
2257  rtx *tmps, src;
2258  int start, i;
2259
2260  if (GET_CODE (dst) != PARALLEL)
2261    abort ();
2262
2263  /* Check for a NULL entry, used to indicate that the parameter goes
2264     both on the stack and in registers.  */
2265  if (XEXP (XVECEXP (dst, 0, 0), 0))
2266    start = 0;
2267  else
2268    start = 1;
2269
2270  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2271
2272  /* Process the pieces.  */
2273  for (i = start; i < XVECLEN (dst, 0); i++)
2274    {
2275      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2276      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2277      unsigned int bytelen = GET_MODE_SIZE (mode);
2278      int shift = 0;
2279
2280      /* Handle trailing fragments that run over the size of the struct.  */
2281      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2282	{
2283	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2284	  bytelen = ssize - bytepos;
2285	  if (bytelen <= 0)
2286	    abort ();
2287	}
2288
2289      /* If we won't be loading directly from memory, protect the real source
2290	 from strange tricks we might play; but make sure that the source can
2291	 be loaded directly into the destination.  */
2292      src = orig_src;
2293      if (GET_CODE (orig_src) != MEM
2294	  && (!CONSTANT_P (orig_src)
2295	      || (GET_MODE (orig_src) != mode
2296		  && GET_MODE (orig_src) != VOIDmode)))
2297	{
2298	  if (GET_MODE (orig_src) == VOIDmode)
2299	    src = gen_reg_rtx (mode);
2300	  else
2301	    src = gen_reg_rtx (GET_MODE (orig_src));
2302
2303	  emit_move_insn (src, orig_src);
2304	}
2305
2306      /* Optimize the access just a bit.  */
2307      if (GET_CODE (src) == MEM
2308	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2309	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2310	  && bytelen == GET_MODE_SIZE (mode))
2311	{
2312	  tmps[i] = gen_reg_rtx (mode);
2313	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2314	}
2315      else if (GET_CODE (src) == CONCAT)
2316	{
2317	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2318	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2319
2320	  if ((bytepos == 0 && bytelen == slen0)
2321	      || (bytepos != 0 && bytepos + bytelen <= slen))
2322	    {
2323	      /* The following assumes that the concatenated objects all
2324		 have the same size.  In this case, a simple calculation
2325		 can be used to determine the object and the bit field
2326		 to be extracted.  */
2327	      tmps[i] = XEXP (src, bytepos / slen0);
2328	      if (! CONSTANT_P (tmps[i])
2329		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2330		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2331					     (bytepos % slen0) * BITS_PER_UNIT,
2332					     1, NULL_RTX, mode, mode, ssize);
2333	    }
2334	  else if (bytepos == 0)
2335	    {
2336	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2337	      emit_move_insn (mem, src);
2338	      tmps[i] = adjust_address (mem, mode, 0);
2339	    }
2340	  else
2341	    abort ();
2342	}
2343      else if (CONSTANT_P (src)
2344	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2345	tmps[i] = src;
2346      else
2347	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2348				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2349				     mode, mode, ssize);
2350
2351      if (BYTES_BIG_ENDIAN && shift)
2352	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2353		      tmps[i], 0, OPTAB_WIDEN);
2354    }
2355
2356  emit_queue ();
2357
2358  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2359  for (i = start; i < XVECLEN (dst, 0); i++)
2360    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2361}
2362
2363/* Emit code to move a block SRC to block DST, where SRC and DST are
2364   non-consecutive groups of registers, each represented by a PARALLEL.  */
2365
2366void
2367emit_group_move (dst, src)
2368     rtx dst, src;
2369{
2370  int i;
2371
2372  if (GET_CODE (src) != PARALLEL
2373      || GET_CODE (dst) != PARALLEL
2374      || XVECLEN (src, 0) != XVECLEN (dst, 0))
2375    abort ();
2376
2377  /* Skip first entry if NULL.  */
2378  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2379    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2380		    XEXP (XVECEXP (src, 0, i), 0));
2381}
2382
2383/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2384   registers represented by a PARALLEL.  SSIZE represents the total size of
2385   block DST, or -1 if not known.  */
2386
2387void
2388emit_group_store (orig_dst, src, ssize)
2389     rtx orig_dst, src;
2390     int ssize;
2391{
2392  rtx *tmps, dst;
2393  int start, i;
2394
2395  if (GET_CODE (src) != PARALLEL)
2396    abort ();
2397
2398  /* Check for a NULL entry, used to indicate that the parameter goes
2399     both on the stack and in registers.  */
2400  if (XEXP (XVECEXP (src, 0, 0), 0))
2401    start = 0;
2402  else
2403    start = 1;
2404
2405  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2406
2407  /* Copy the (probable) hard regs into pseudos.  */
2408  for (i = start; i < XVECLEN (src, 0); i++)
2409    {
2410      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2411      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2412      emit_move_insn (tmps[i], reg);
2413    }
2414  emit_queue ();
2415
2416  /* If we won't be storing directly into memory, protect the real destination
2417     from strange tricks we might play.  */
2418  dst = orig_dst;
2419  if (GET_CODE (dst) == PARALLEL)
2420    {
2421      rtx temp;
2422
2423      /* We can get a PARALLEL dst if there is a conditional expression in
2424	 a return statement.  In that case, the dst and src are the same,
2425	 so no action is necessary.  */
2426      if (rtx_equal_p (dst, src))
2427	return;
2428
2429      /* It is unclear if we can ever reach here, but we may as well handle
2430	 it.  Allocate a temporary, and split this into a store/load to/from
2431	 the temporary.  */
2432
2433      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2434      emit_group_store (temp, src, ssize);
2435      emit_group_load (dst, temp, ssize);
2436      return;
2437    }
2438  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2439    {
2440      dst = gen_reg_rtx (GET_MODE (orig_dst));
2441      /* Make life a bit easier for combine.  */
2442      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2443    }
2444
2445  /* Process the pieces.  */
2446  for (i = start; i < XVECLEN (src, 0); i++)
2447    {
2448      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2449      enum machine_mode mode = GET_MODE (tmps[i]);
2450      unsigned int bytelen = GET_MODE_SIZE (mode);
2451      rtx dest = dst;
2452
2453      /* Handle trailing fragments that run over the size of the struct.  */
2454      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2455	{
2456	  if (BYTES_BIG_ENDIAN)
2457	    {
2458	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2459	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2460			    tmps[i], 0, OPTAB_WIDEN);
2461	    }
2462	  bytelen = ssize - bytepos;
2463	}
2464
2465      if (GET_CODE (dst) == CONCAT)
2466	{
2467	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2468	    dest = XEXP (dst, 0);
2469	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2470	    {
2471	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2472	      dest = XEXP (dst, 1);
2473	    }
2474	  else if (bytepos == 0 && XVECLEN (src, 0))
2475	    {
2476	      dest = assign_stack_temp (GET_MODE (dest),
2477				        GET_MODE_SIZE (GET_MODE (dest)), 0);
2478	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2479			      tmps[i]);
2480	      dst = dest;
2481	      break;
2482	    }
2483	  else
2484	    abort ();
2485	}
2486
2487      /* Optimize the access just a bit.  */
2488      if (GET_CODE (dest) == MEM
2489	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2490	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2491	  && bytelen == GET_MODE_SIZE (mode))
2492	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2493      else
2494	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2495			 mode, tmps[i], ssize);
2496    }
2497
2498  emit_queue ();
2499
2500  /* Copy from the pseudo into the (probable) hard reg.  */
2501  if (orig_dst != dst)
2502    emit_move_insn (orig_dst, dst);
2503}
2504
2505/* Generate code to copy a BLKmode object of TYPE out of a
2506   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2507   is null, a stack temporary is created.  TGTBLK is returned.
2508
2509   The primary purpose of this routine is to handle functions
2510   that return BLKmode structures in registers.  Some machines
2511   (the PA for example) want to return all small structures
2512   in registers regardless of the structure's alignment.  */
2513
2514rtx
2515copy_blkmode_from_reg (tgtblk, srcreg, type)
2516     rtx tgtblk;
2517     rtx srcreg;
2518     tree type;
2519{
2520  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2521  rtx src = NULL, dst = NULL;
2522  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2523  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2524
2525  if (tgtblk == 0)
2526    {
2527      tgtblk = assign_temp (build_qualified_type (type,
2528						  (TYPE_QUALS (type)
2529						   | TYPE_QUAL_CONST)),
2530			    0, 1, 1);
2531      preserve_temp_slots (tgtblk);
2532    }
2533
2534  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2535     into a new pseudo which is a full word.  */
2536
2537  if (GET_MODE (srcreg) != BLKmode
2538      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2539    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2540
2541  /* Structures whose size is not a multiple of a word are aligned
2542     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2543     machine, this means we must skip the empty high order bytes when
2544     calculating the bit offset.  */
2545  if (BYTES_BIG_ENDIAN
2546      && bytes % UNITS_PER_WORD)
2547    big_endian_correction
2548      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
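
  /* E.g. (illustrative), with 32-bit words and a 3-byte structure the
     data occupies only the low-order 24 bits of its register, so the
     correction is 32 - 24 = 8 bits and extraction starts past the one
     empty high-order byte.  */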
2549
2550  /* Copy the structure BITSIZE bits at a time.
2551
2552     We could probably emit more efficient code for machines which do not use
2553     strict alignment, but it doesn't seem worth the effort at the current
2554     time.  */
2555  for (bitpos = 0, xbitpos = big_endian_correction;
2556       bitpos < bytes * BITS_PER_UNIT;
2557       bitpos += bitsize, xbitpos += bitsize)
2558    {
2559      /* We need a new source operand each time xbitpos is on a
2560	 word boundary or when xbitpos == big_endian_correction
2561	 (the first time through).  */
2562      if (xbitpos % BITS_PER_WORD == 0
2563	  || xbitpos == big_endian_correction)
2564	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2565				     GET_MODE (srcreg));
2566
2567      /* We need a new destination operand each time bitpos is on
2568	 a word boundary.  */
2569      if (bitpos % BITS_PER_WORD == 0)
2570	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2571
2572      /* Use xbitpos for the source extraction (right justified) and
2573	 bitpos for the destination store (left justified).  */
2574      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2575		       extract_bit_field (src, bitsize,
2576					  xbitpos % BITS_PER_WORD, 1,
2577					  NULL_RTX, word_mode, word_mode,
2578					  BITS_PER_WORD),
2579		       BITS_PER_WORD);
2580    }
2581
2582  return tgtblk;
2583}
2584
2585/* Add a USE expression for REG to the (possibly empty) list pointed
2586   to by CALL_FUSAGE.  REG must denote a hard register.  */
2587
2588void
2589use_reg (call_fusage, reg)
2590     rtx *call_fusage, reg;
2591{
2592  if (GET_CODE (reg) != REG
2593      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2594    abort ();
2595
2596  *call_fusage
2597    = gen_rtx_EXPR_LIST (VOIDmode,
2598			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2599}
2600
2601/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2602   starting at REGNO.  All of these registers must be hard registers.  */
2603
2604void
2605use_regs (call_fusage, regno, nregs)
2606     rtx *call_fusage;
2607     int regno;
2608     int nregs;
2609{
2610  int i;
2611
2612  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2613    abort ();
2614
2615  for (i = 0; i < nregs; i++)
2616    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2617}
2618
2619/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2620   PARALLEL REGS.  This is for calls that pass values in multiple
2621   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2622
2623void
2624use_group_regs (call_fusage, regs)
2625     rtx *call_fusage;
2626     rtx regs;
2627{
2628  int i;
2629
2630  for (i = 0; i < XVECLEN (regs, 0); i++)
2631    {
2632      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2633
2634      /* A NULL entry means the parameter goes both on the stack and in
2635	 registers.  This can also be a MEM for targets that pass values
2636	 partially on the stack and partially in registers.  */
2637      if (reg != 0 && GET_CODE (reg) == REG)
2638	use_reg (call_fusage, reg);
2639    }
2640}
2641
2642
2643/* Determine whether the LEN bytes generated by CONSTFUN can be
2644   stored to memory using several move instructions.  CONSTFUNDATA is
2645   a pointer which will be passed as argument in every CONSTFUN call.
2646   ALIGN is maximum alignment we can assume.  Return nonzero if a
2647   call to store_by_pieces should succeed.  */
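
/* An illustrative, hypothetical CONSTFUN that serves bytes out of a
   constant string (roughly in the style of the callbacks used by the
   string builtin expanders, and assuming a helper such as c_readstr
   that packs GET_MODE_SIZE (MODE) bytes into an rtx constant) might be:

       static rtx
       read_str_constfun (data, offset, mode)
            PTR data;
            HOST_WIDE_INT offset;
            enum machine_mode mode;
       {
         const char *str = (const char *) data;
         return c_readstr (str + offset, mode);
       }
*/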
2648
2649int
2650can_store_by_pieces (len, constfun, constfundata, align)
2651     unsigned HOST_WIDE_INT len;
2652     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2653     PTR constfundata;
2654     unsigned int align;
2655{
2656  unsigned HOST_WIDE_INT max_size, l;
2657  HOST_WIDE_INT offset = 0;
2658  enum machine_mode mode, tmode;
2659  enum insn_code icode;
2660  int reverse;
2661  rtx cst;
2662
2663  if (len == 0)
2664    return 1;
2665
2666  if (! MOVE_BY_PIECES_P (len, align))
2667    return 0;
2668
2669  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2670      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2671    align = MOVE_MAX * BITS_PER_UNIT;
2672
2673  /* We would first store what we can in the largest integer mode, then go to
2674     successively smaller modes.  */
2675
2676  for (reverse = 0;
2677       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2678       reverse++)
2679    {
2680      l = len;
2681      mode = VOIDmode;
2682      max_size = STORE_MAX_PIECES + 1;
2683      while (max_size > 1)
2684	{
2685	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2686	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2687	    if (GET_MODE_SIZE (tmode) < max_size)
2688	      mode = tmode;
2689
2690	  if (mode == VOIDmode)
2691	    break;
2692
2693	  icode = mov_optab->handlers[(int) mode].insn_code;
2694	  if (icode != CODE_FOR_nothing
2695	      && align >= GET_MODE_ALIGNMENT (mode))
2696	    {
2697	      unsigned int size = GET_MODE_SIZE (mode);
2698
2699	      while (l >= size)
2700		{
2701		  if (reverse)
2702		    offset -= size;
2703
2704		  cst = (*constfun) (constfundata, offset, mode);
2705		  if (!LEGITIMATE_CONSTANT_P (cst))
2706		    return 0;
2707
2708		  if (!reverse)
2709		    offset += size;
2710
2711		  l -= size;
2712		}
2713	    }
2714
2715	  max_size = GET_MODE_SIZE (mode);
2716	}
2717
2718      /* The code above should have handled everything.  */
2719      if (l != 0)
2720	abort ();
2721    }
2722
2723  return 1;
2724}
2725
2726/* Generate several move instructions to store LEN bytes generated by
2727   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2728   pointer which will be passed as argument in every CONSTFUN call.
2729   ALIGN is maximum alignment we can assume.  */
2730
2731void
2732store_by_pieces (to, len, constfun, constfundata, align)
2733     rtx to;
2734     unsigned HOST_WIDE_INT len;
2735     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2736     PTR constfundata;
2737     unsigned int align;
2738{
2739  struct store_by_pieces data;
2740
2741  if (len == 0)
2742    return;
2743
2744  if (! MOVE_BY_PIECES_P (len, align))
2745    abort ();
2746  to = protect_from_queue (to, 1);
2747  data.constfun = constfun;
2748  data.constfundata = constfundata;
2749  data.len = len;
2750  data.to = to;
2751  store_by_pieces_1 (&data, align);
2752}
2753
2754/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2755   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2756   before calling. ALIGN is maximum alignment we can assume.  */
2757
2758static void
2759clear_by_pieces (to, len, align)
2760     rtx to;
2761     unsigned HOST_WIDE_INT len;
2762     unsigned int align;
2763{
2764  struct store_by_pieces data;
2765
2766  if (len == 0)
2767    return;
2768
2769  data.constfun = clear_by_pieces_1;
2770  data.constfundata = NULL;
2771  data.len = len;
2772  data.to = to;
2773  store_by_pieces_1 (&data, align);
2774}
2775
2776/* Callback routine for clear_by_pieces.
2777   Return const0_rtx unconditionally.  */
2778
2779static rtx
2780clear_by_pieces_1 (data, offset, mode)
2781     PTR data ATTRIBUTE_UNUSED;
2782     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2783     enum machine_mode mode ATTRIBUTE_UNUSED;
2784{
2785  return const0_rtx;
2786}
2787
2788/* Subroutine of clear_by_pieces and store_by_pieces.
2789   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2790   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2791   before calling.  ALIGN is maximum alignment we can assume.  */
2792
2793static void
2794store_by_pieces_1 (data, align)
2795     struct store_by_pieces *data;
2796     unsigned int align;
2797{
2798  rtx to_addr = XEXP (data->to, 0);
2799  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2800  enum machine_mode mode = VOIDmode, tmode;
2801  enum insn_code icode;
2802
2803  data->offset = 0;
2804  data->to_addr = to_addr;
2805  data->autinc_to
2806    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2807       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2808
2809  data->explicit_inc_to = 0;
2810  data->reverse
2811    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2812  if (data->reverse)
2813    data->offset = data->len;
2814
2815  /* If storing requires more than two move insns,
2816     copy addresses to registers (to make displacements shorter)
2817     and use post-increment if available.  */
2818  if (!data->autinc_to
2819      && move_by_pieces_ninsns (data->len, align) > 2)
2820    {
2821      /* Determine the main mode we'll be using.  */
2822      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2823	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2824	if (GET_MODE_SIZE (tmode) < max_size)
2825	  mode = tmode;
2826
2827      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2828	{
2829	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2830	  data->autinc_to = 1;
2831	  data->explicit_inc_to = -1;
2832	}
2833
2834      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2835	  && ! data->autinc_to)
2836	{
2837	  data->to_addr = copy_addr_to_reg (to_addr);
2838	  data->autinc_to = 1;
2839	  data->explicit_inc_to = 1;
2840	}
2841
2842      if (!data->autinc_to && CONSTANT_P (to_addr))
2843	data->to_addr = copy_addr_to_reg (to_addr);
2844    }
2845
2846  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2847      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2848    align = MOVE_MAX * BITS_PER_UNIT;
2849
2850  /* First store what we can in the largest integer mode, then go to
2851     successively smaller modes.  */
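
  /* E.g. (illustrative, assuming SImode is the widest mode used and the
     destination is sufficiently aligned), an 11-byte store comes out as
     SImode stores at offsets 0 and 4, an HImode store at offset 8 and a
     QImode store at offset 10.  */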
2852
2853  while (max_size > 1)
2854    {
2855      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2856	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2857	if (GET_MODE_SIZE (tmode) < max_size)
2858	  mode = tmode;
2859
2860      if (mode == VOIDmode)
2861	break;
2862
2863      icode = mov_optab->handlers[(int) mode].insn_code;
2864      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2865	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2866
2867      max_size = GET_MODE_SIZE (mode);
2868    }
2869
2870  /* The code above should have handled everything.  */
2871  if (data->len != 0)
2872    abort ();
2873}
2874
2875/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2876   with move instructions for mode MODE.  GENFUN is the gen_... function
2877   to make a move insn for that mode.  DATA has all the other info.  */
2878
2879static void
2880store_by_pieces_2 (genfun, mode, data)
2881     rtx (*genfun) PARAMS ((rtx, ...));
2882     enum machine_mode mode;
2883     struct store_by_pieces *data;
2884{
2885  unsigned int size = GET_MODE_SIZE (mode);
2886  rtx to1, cst;
2887
2888  while (data->len >= size)
2889    {
2890      if (data->reverse)
2891	data->offset -= size;
2892
2893      if (data->autinc_to)
2894	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2895					 data->offset);
2896      else
2897	to1 = adjust_address (data->to, mode, data->offset);
2898
2899      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2900	emit_insn (gen_add2_insn (data->to_addr,
2901				  GEN_INT (-(HOST_WIDE_INT) size)));
2902
2903      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2904      emit_insn ((*genfun) (to1, cst));
2905
2906      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2907	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2908
2909      if (! data->reverse)
2910	data->offset += size;
2911
2912      data->len -= size;
2913    }
2914}
2915
2916/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2917   its length in bytes.  */
2918
2919rtx
2920clear_storage (object, size)
2921     rtx object;
2922     rtx size;
2923{
2924  rtx retval = 0;
2925  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2926			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2927
2928  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2929     just move a zero.  Otherwise, do this a piece at a time.  */
2930  if (GET_MODE (object) != BLKmode
2931      && GET_CODE (size) == CONST_INT
2932      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2933    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2934  else
2935    {
2936      object = protect_from_queue (object, 1);
2937      size = protect_from_queue (size, 0);
2938
2939      if (GET_CODE (size) == CONST_INT && INTVAL (size) == 0)
2940	;
2941      else if (GET_CODE (size) == CONST_INT
2942	  && CLEAR_BY_PIECES_P (INTVAL (size), align))
2943	clear_by_pieces (object, INTVAL (size), align);
2944      else if (clear_storage_via_clrstr (object, size, align))
2945	;
2946      else
2947	retval = clear_storage_via_libcall (object, size);
2948    }
2949
2950  return retval;
2951}
2952
2953/* A subroutine of clear_storage.  Expand a clrstr pattern;
2954   return true if successful.  */
2955
2956static bool
2957clear_storage_via_clrstr (object, size, align)
2958     rtx object, size;
2959     unsigned int align;
2960{
2961  /* Try the most limited insn first, because there's no point
2962     including more than one in the machine description unless
2963     the more limited one has some advantage.  */
2964
2965  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2966  enum machine_mode mode;
2967
2968  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2969       mode = GET_MODE_WIDER_MODE (mode))
2970    {
2971      enum insn_code code = clrstr_optab[(int) mode];
2972      insn_operand_predicate_fn pred;
2973
2974      if (code != CODE_FOR_nothing
2975	  /* We don't need MODE to be narrower than
2976	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2977	     the mode mask, as it is returned by the macro, it will
2978	     definitely be less than the actual mode mask.  */
2979	  && ((GET_CODE (size) == CONST_INT
2980	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2981		   <= (GET_MODE_MASK (mode) >> 1)))
2982	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2983	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2984	      || (*pred) (object, BLKmode))
2985	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2986	      || (*pred) (opalign, VOIDmode)))
2987	{
2988	  rtx op1;
2989	  rtx last = get_last_insn ();
2990	  rtx pat;
2991
2992	  op1 = convert_to_mode (mode, size, 1);
2993	  pred = insn_data[(int) code].operand[1].predicate;
2994	  if (pred != 0 && ! (*pred) (op1, mode))
2995	    op1 = copy_to_mode_reg (mode, op1);
2996
2997	  pat = GEN_FCN ((int) code) (object, op1, opalign);
2998	  if (pat)
2999	    {
3000	      emit_insn (pat);
3001	      return true;
3002	    }
3003	  else
3004	    delete_insns_since (last);
3005	}
3006    }
3007
3008  return false;
3009}
3010
3011/* A subroutine of clear_storage.  Expand a call to memset or bzero.
3012   Return the return value of memset, 0 otherwise.  */
3013
3014static rtx
3015clear_storage_via_libcall (object, size)
3016     rtx object, size;
3017{
3018  tree call_expr, arg_list, fn, object_tree, size_tree;
3019  enum machine_mode size_mode;
3020  rtx retval;
3021
3022  /* OBJECT or SIZE may have been passed through protect_from_queue.
3023
3024     It is unsafe to save the value generated by protect_from_queue
3025     and reuse it later.  Consider what happens if emit_queue is
3026     called before the return value from protect_from_queue is used.
3027
3028     Expansion of the CALL_EXPR below will call emit_queue before
3029     we are finished emitting RTL for argument setup.  So if we are
3030     not careful we could get the wrong value for an argument.
3031
3032     To avoid this problem we go ahead and emit code to copy OBJECT
3033     and SIZE into new pseudos.  We can then place those new pseudos
3034     into an RTL_EXPR and use them later, even after a call to
3035     emit_queue.
3036
3037     Note this is not strictly needed for library calls since they
3038     do not call emit_queue before loading their arguments.  However,
3039     we may need to have library calls call emit_queue in the future
3040     since failing to do so could cause problems for targets which
3041     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
3042
3043  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3044
3045  if (TARGET_MEM_FUNCTIONS)
3046    size_mode = TYPE_MODE (sizetype);
3047  else
3048    size_mode = TYPE_MODE (unsigned_type_node);
3049  size = convert_to_mode (size_mode, size, 1);
3050  size = copy_to_mode_reg (size_mode, size);
3051
3052  /* It is incorrect to use the libcall calling conventions to call
3053     memset in this context.  This could be a user call to memset and
3054     the user may wish to examine the return value from memset.  For
3055     targets where libcalls and normal calls have different conventions
3056     for returning pointers, we could end up generating incorrect code.
3057
3058     For convenience, we generate the call to bzero this way as well.  */
3059
3060  object_tree = make_tree (ptr_type_node, object);
3061  if (TARGET_MEM_FUNCTIONS)
3062    size_tree = make_tree (sizetype, size);
3063  else
3064    size_tree = make_tree (unsigned_type_node, size);
3065
3066  fn = clear_storage_libcall_fn (true);
3067  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3068  if (TARGET_MEM_FUNCTIONS)
3069    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3070  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
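
  /* In source terms, the call built below is memset (object, 0, size)
     when TARGET_MEM_FUNCTIONS, and bzero (object, size) otherwise.  */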
3071
3072  /* Now we have to build up the CALL_EXPR itself.  */
3073  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3074  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3075		     call_expr, arg_list, NULL_TREE);
3076  TREE_SIDE_EFFECTS (call_expr) = 1;
3077
3078  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3079
3080  /* If we are initializing a readonly value, show the above call
3081     clobbered it.  Otherwise, a load from it may erroneously be
3082     hoisted from a loop.  */
3083  if (RTX_UNCHANGING_P (object))
3084    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3085
3086  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3087}
3088
3089/* A subroutine of clear_storage_via_libcall.  Create the tree node
3090   for the function we use for block clears.  The first time FOR_CALL
3091   is true, we call assemble_external.  */
3092
3093static GTY(()) tree block_clear_fn;
3094
3095static tree
3096clear_storage_libcall_fn (for_call)
3097     int for_call;
3098{
3099  static bool emitted_extern;
3100  tree fn = block_clear_fn, args;
3101
3102  if (!fn)
3103    {
3104      if (TARGET_MEM_FUNCTIONS)
3105	{
3106	  fn = get_identifier ("memset");
3107	  args = build_function_type_list (ptr_type_node, ptr_type_node,
3108					   integer_type_node, sizetype,
3109					   NULL_TREE);
3110	}
3111      else
3112	{
3113	  fn = get_identifier ("bzero");
3114	  args = build_function_type_list (void_type_node, ptr_type_node,
3115					   unsigned_type_node, NULL_TREE);
3116	}
3117
3118      fn = build_decl (FUNCTION_DECL, fn, args);
3119      DECL_EXTERNAL (fn) = 1;
3120      TREE_PUBLIC (fn) = 1;
3121      DECL_ARTIFICIAL (fn) = 1;
3122      TREE_NOTHROW (fn) = 1;
3123
3124      block_clear_fn = fn;
3125    }
3126
3127  if (for_call && !emitted_extern)
3128    {
3129      emitted_extern = true;
3130      make_decl_rtl (fn, NULL);
3131      assemble_external (fn);
3132    }
3133
3134  return fn;
3135}
3136
3137/* Generate code to copy Y into X.
3138   Both Y and X must have the same mode, except that
3139   Y can be a constant with VOIDmode.
3140   This mode cannot be BLKmode; use emit_block_move for that.
3141
3142   Return the last instruction emitted.  */
3143
3144rtx
3145emit_move_insn (x, y)
3146     rtx x, y;
3147{
3148  enum machine_mode mode = GET_MODE (x);
3149  rtx y_cst = NULL_RTX;
3150  rtx last_insn;
3151
3152  x = protect_from_queue (x, 1);
3153  y = protect_from_queue (y, 0);
3154
3155  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3156    abort ();
3157
3158  /* Never force constant_p_rtx to memory.  */
3159  if (GET_CODE (y) == CONSTANT_P_RTX)
3160    ;
3161  else if (CONSTANT_P (y))
3162    {
3163      if (optimize
3164	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3165	  && (last_insn = compress_float_constant (x, y)))
3166	return last_insn;
3167
3168      if (!LEGITIMATE_CONSTANT_P (y))
3169	{
3170	  y_cst = y;
3171	  y = force_const_mem (mode, y);
3172
3173	  /* If the target's cannot_force_const_mem prevented the spill,
3174	     assume that the target's move expanders will also take care
3175	     of the non-legitimate constant.  */
3176	  if (!y)
3177	    y = y_cst;
3178	}
3179    }
3180
3181  /* If X or Y are memory references, verify that their addresses are valid
3182     for the machine.  */
3183  if (GET_CODE (x) == MEM
3184      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3185	   && ! push_operand (x, GET_MODE (x)))
3186	  || (flag_force_addr
3187	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3188    x = validize_mem (x);
3189
3190  if (GET_CODE (y) == MEM
3191      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3192	  || (flag_force_addr
3193	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3194    y = validize_mem (y);
3195
3196  if (mode == BLKmode)
3197    abort ();
3198
3199  last_insn = emit_move_insn_1 (x, y);
3200
3201  if (y_cst && GET_CODE (x) == REG)
3202    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3203
3204  return last_insn;
3205}
3206
3207/* Low level part of emit_move_insn.
3208   Called just like emit_move_insn, but assumes X and Y
3209   are basically valid.  */
3210
3211rtx
3212emit_move_insn_1 (x, y)
3213     rtx x, y;
3214{
3215  enum machine_mode mode = GET_MODE (x);
3216  enum machine_mode submode;
3217  enum mode_class class = GET_MODE_CLASS (mode);
3218
3219  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3220    abort ();
3221
3222  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3223    return
3224      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3225
3226  /* Expand complex moves by moving real part and imag part, if possible.  */
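  /* Broadly speaking (illustrative), a DCmode move whose DFmode parts
     can be handled directly is expanded as two DFmode moves, one for
     the real part and one for the imaginary part.  */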
3227  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3228	   && BLKmode != (submode = GET_MODE_INNER (mode))
3229	   && (mov_optab->handlers[(int) submode].insn_code
3230	       != CODE_FOR_nothing))
3231    {
3232      /* Don't split destination if it is a stack push.  */
3233      int stack = push_operand (x, GET_MODE (x));
3234
3235#ifdef PUSH_ROUNDING
3236      /* In case we output to the stack, but the size is smaller than the
3237	 machine can push exactly, we need to use move instructions.  */
3238      if (stack
3239	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3240	      != GET_MODE_SIZE (submode)))
3241	{
3242	  rtx temp;
3243	  HOST_WIDE_INT offset1, offset2;
3244
3245	  /* Do not use anti_adjust_stack, since we don't want to update
3246	     stack_pointer_delta.  */
3247	  temp = expand_binop (Pmode,
3248#ifdef STACK_GROWS_DOWNWARD
3249			       sub_optab,
3250#else
3251			       add_optab,
3252#endif
3253			       stack_pointer_rtx,
3254			       GEN_INT
3255				 (PUSH_ROUNDING
3256				  (GET_MODE_SIZE (GET_MODE (x)))),
3257			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3258
3259	  if (temp != stack_pointer_rtx)
3260	    emit_move_insn (stack_pointer_rtx, temp);
3261
3262#ifdef STACK_GROWS_DOWNWARD
3263	  offset1 = 0;
3264	  offset2 = GET_MODE_SIZE (submode);
3265#else
3266	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3267	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3268		     + GET_MODE_SIZE (submode));
3269#endif
3270
3271	  emit_move_insn (change_address (x, submode,
3272					  gen_rtx_PLUS (Pmode,
3273						        stack_pointer_rtx,
3274							GEN_INT (offset1))),
3275			  gen_realpart (submode, y));
3276	  emit_move_insn (change_address (x, submode,
3277					  gen_rtx_PLUS (Pmode,
3278						        stack_pointer_rtx,
3279							GEN_INT (offset2))),
3280			  gen_imagpart (submode, y));
3281	}
3282      else
3283#endif
3284      /* If this is a stack push, push the highpart first, so it
3285	 will be in the argument order.
3286
3287	 In that case, change_address is used only to convert
3288	 the mode, not to change the address.  */
3289      if (stack)
3290	{
3291	  /* Note that the real part always precedes the imag part in memory
3292	     regardless of machine's endianness.  */
3293#ifdef STACK_GROWS_DOWNWARD
3294	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3295		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3296		      gen_imagpart (submode, y)));
3297	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3298		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3299		      gen_realpart (submode, y)));
3300#else
3301	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3302		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3303		      gen_realpart (submode, y)));
3304	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3305		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3306		      gen_imagpart (submode, y)));
3307#endif
3308	}
3309      else
3310	{
3311	  rtx realpart_x, realpart_y;
3312	  rtx imagpart_x, imagpart_y;
3313
3314	  /* If this is a complex value with each part being smaller than a
3315	     word, the usual calling sequence will likely pack the pieces into
3316	     a single register.  Unfortunately, SUBREG of hard registers only
3317	     deals in terms of words, so we have a problem converting input
3318	     arguments to the CONCAT of two registers that is used elsewhere
3319	     for complex values.  If this is before reload, we can copy it into
3320	     memory and reload.  FIXME, we should see about using extract and
3321	     insert on integer registers, but complex short and complex char
3322	     variables should be rarely used.  */
3323	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3324	      && (reload_in_progress | reload_completed) == 0)
3325	    {
3326	      int packed_dest_p
3327		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3328	      int packed_src_p
3329		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3330
3331	      if (packed_dest_p || packed_src_p)
3332		{
3333		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3334					       ? MODE_FLOAT : MODE_INT);
3335
3336		  enum machine_mode reg_mode
3337		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3338
3339		  if (reg_mode != BLKmode)
3340		    {
3341		      rtx mem = assign_stack_temp (reg_mode,
3342						   GET_MODE_SIZE (mode), 0);
3343		      rtx cmem = adjust_address (mem, mode, 0);
3344
3345		      cfun->cannot_inline
3346			= N_("function using short complex types cannot be inline");
3347
3348		      if (packed_dest_p)
3349			{
3350			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3351
3352			  emit_move_insn_1 (cmem, y);
3353			  return emit_move_insn_1 (sreg, mem);
3354			}
3355		      else
3356			{
3357			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3358
3359			  emit_move_insn_1 (mem, sreg);
3360			  return emit_move_insn_1 (x, cmem);
3361			}
3362		    }
3363		}
3364	    }
3365
3366	  realpart_x = gen_realpart (submode, x);
3367	  realpart_y = gen_realpart (submode, y);
3368	  imagpart_x = gen_imagpart (submode, x);
3369	  imagpart_y = gen_imagpart (submode, y);
3370
3371	  /* Show the output dies here.  This is necessary for SUBREGs
3372	     of pseudos since we cannot track their lifetimes correctly;
3373	     hard regs shouldn't appear here except as return values.
3374	     We never want to emit such a clobber after reload.  */
3375	  if (x != y
3376	      && ! (reload_in_progress || reload_completed)
3377	      && (GET_CODE (realpart_x) == SUBREG
3378		  || GET_CODE (imagpart_x) == SUBREG))
3379	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3380
3381	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3382		     (realpart_x, realpart_y));
3383	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3384		     (imagpart_x, imagpart_y));
3385	}
3386
3387      return get_last_insn ();
3388    }
3389
3390  /* This will handle any multi-word or full-word mode that lacks a move_insn
3391     pattern.  However, you will get better code if you define such patterns,
3392     even if they must turn into multiple assembler instructions.  */
3393  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3394    {
3395      rtx last_insn = 0;
3396      rtx seq, inner;
3397      int need_clobber;
3398      int i;
3399
3400#ifdef PUSH_ROUNDING
3401
3402      /* If X is a push on the stack, do the push now and replace
3403	 X with a reference to the stack pointer.  */
3404      if (push_operand (x, GET_MODE (x)))
3405	{
3406	  rtx temp;
3407	  enum rtx_code code;
3408
3409	  /* Do not use anti_adjust_stack, since we don't want to update
3410	     stack_pointer_delta.  */
3411	  temp = expand_binop (Pmode,
3412#ifdef STACK_GROWS_DOWNWARD
3413			       sub_optab,
3414#else
3415			       add_optab,
3416#endif
3417			       stack_pointer_rtx,
3418			       GEN_INT
3419				 (PUSH_ROUNDING
3420				  (GET_MODE_SIZE (GET_MODE (x)))),
3421			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3422
3423	  if (temp != stack_pointer_rtx)
3424	    emit_move_insn (stack_pointer_rtx, temp);
3425
3426	  code = GET_CODE (XEXP (x, 0));
3427
3428	  /* Just hope that small offsets off SP are OK.  */
3429	  if (code == POST_INC)
3430	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3431				GEN_INT (-((HOST_WIDE_INT)
3432					   GET_MODE_SIZE (GET_MODE (x)))));
3433	  else if (code == POST_DEC)
3434	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3435				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3436	  else
3437	    temp = stack_pointer_rtx;
3438
3439	  x = change_address (x, VOIDmode, temp);
3440	}
3441#endif
3442
3443      /* If we are in reload, see if either operand is a MEM whose address
3444	 is scheduled for replacement.  */
3445      if (reload_in_progress && GET_CODE (x) == MEM
3446	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3447	x = replace_equiv_address_nv (x, inner);
3448      if (reload_in_progress && GET_CODE (y) == MEM
3449	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3450	y = replace_equiv_address_nv (y, inner);
3451
3452      start_sequence ();
3453
3454      need_clobber = 0;
3455      for (i = 0;
3456	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3457	   i++)
3458	{
3459	  rtx xpart = operand_subword (x, i, 1, mode);
3460	  rtx ypart = operand_subword (y, i, 1, mode);
3461
3462	  /* If we can't get a part of Y, put Y into memory if it is a
3463	     constant.  Otherwise, force it into a register.  If we still
3464	     can't get a part of Y, abort.  */
3465	  if (ypart == 0 && CONSTANT_P (y))
3466	    {
3467	      y = force_const_mem (mode, y);
3468	      ypart = operand_subword (y, i, 1, mode);
3469	    }
3470	  else if (ypart == 0)
3471	    ypart = operand_subword_force (y, i, mode);
3472
3473	  if (xpart == 0 || ypart == 0)
3474	    abort ();
3475
3476	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3477
3478	  last_insn = emit_move_insn (xpart, ypart);
3479	}
3480
3481      seq = get_insns ();
3482      end_sequence ();
3483
3484      /* Show the output dies here.  This is necessary for SUBREGs
3485	 of pseudos since we cannot track their lifetimes correctly;
3486	 hard regs shouldn't appear here except as return values.
3487	 We never want to emit such a clobber after reload.  */
3488      if (x != y
3489	  && ! (reload_in_progress || reload_completed)
3490	  && need_clobber != 0)
3491	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3492
3493      emit_insn (seq);
3494
3495      return last_insn;
3496    }
3497  else
3498    abort ();
3499}
3500
3501/* If Y is representable exactly in a narrower mode, and the target can
3502   perform the extension directly from constant or memory, then emit the
3503   move as an extension.  */
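
/* E.g. (illustrative), a DFmode store of the constant 1.0 can be done
   by loading 1.0 as an SFmode constant and extending it, because 1.0
   is exactly representable in SFmode; a constant such as 0.1, which is
   not exact in SFmode, is left for the normal move path.  */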
3504
3505static rtx
3506compress_float_constant (x, y)
3507     rtx x, y;
3508{
3509  enum machine_mode dstmode = GET_MODE (x);
3510  enum machine_mode orig_srcmode = GET_MODE (y);
3511  enum machine_mode srcmode;
3512  REAL_VALUE_TYPE r;
3513
3514  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3515
3516  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3517       srcmode != orig_srcmode;
3518       srcmode = GET_MODE_WIDER_MODE (srcmode))
3519    {
3520      enum insn_code ic;
3521      rtx trunc_y, last_insn;
3522
3523      /* Skip if the target can't extend this way.  */
3524      ic = can_extend_p (dstmode, srcmode, 0);
3525      if (ic == CODE_FOR_nothing)
3526	continue;
3527
3528      /* Skip if the narrowed value isn't exact.  */
3529      if (! exact_real_truncate (srcmode, &r))
3530	continue;
3531
3532      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3533
3534      if (LEGITIMATE_CONSTANT_P (trunc_y))
3535	{
3536	  /* Skip if the target needs extra instructions to perform
3537	     the extension.  */
3538	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3539	    continue;
3540	}
3541      else if (float_extend_from_mem[dstmode][srcmode])
3542	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3543      else
3544	continue;
3545
3546      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3547      last_insn = get_last_insn ();
3548
3549      if (GET_CODE (x) == REG)
3550	REG_NOTES (last_insn)
3551	  = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3552
3553      return last_insn;
3554    }
3555
3556  return NULL_RTX;
3557}
3558
3559/* Pushing data onto the stack.  */
3560
3561/* Push a block of length SIZE (perhaps variable)
3562   and return an rtx to address the beginning of the block.
3563   Note that it is not possible for the value returned to be a QUEUED.
3564   The value may be virtual_outgoing_args_rtx.
3565
3566   EXTRA is the number of bytes of padding to push in addition to SIZE.
3567   BELOW nonzero means this padding comes at low addresses;
3568   otherwise, the padding comes at high addresses.  */
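
/* For example (a sketch assuming a downward-growing stack and EXTRA == 0):
   push_block (GEN_INT (64), 0, 0) emits code that decrements the stack
   pointer by 64 bytes and returns an address based on
   virtual_outgoing_args_rtx designating the start of the new block.  */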
3569
3570rtx
3571push_block (size, extra, below)
3572     rtx size;
3573     int extra, below;
3574{
3575  rtx temp;
3576
3577  size = convert_modes (Pmode, ptr_mode, size, 1);
3578  if (CONSTANT_P (size))
3579    anti_adjust_stack (plus_constant (size, extra));
3580  else if (GET_CODE (size) == REG && extra == 0)
3581    anti_adjust_stack (size);
3582  else
3583    {
3584      temp = copy_to_mode_reg (Pmode, size);
3585      if (extra != 0)
3586	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3587			     temp, 0, OPTAB_LIB_WIDEN);
3588      anti_adjust_stack (temp);
3589    }
3590
3591#ifndef STACK_GROWS_DOWNWARD
3592  if (0)
3593#else
3594  if (1)
3595#endif
3596    {
3597      temp = virtual_outgoing_args_rtx;
3598      if (extra != 0 && below)
3599	temp = plus_constant (temp, extra);
3600    }
3601  else
3602    {
3603      if (GET_CODE (size) == CONST_INT)
3604	temp = plus_constant (virtual_outgoing_args_rtx,
3605			      -INTVAL (size) - (below ? 0 : extra));
3606      else if (extra != 0 && !below)
3607	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3608			     negate_rtx (Pmode, plus_constant (size, extra)));
3609      else
3610	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3611			     negate_rtx (Pmode, size));
3612    }
3613
3614  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3615}
3616
3617#ifdef PUSH_ROUNDING
3618
3619/* Emit single push insn.  */
3620
3621static void
3622emit_single_push_insn (mode, x, type)
3623     rtx x;
3624     enum machine_mode mode;
3625     tree type;
3626{
3627  rtx dest_addr;
3628  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3629  rtx dest;
3630  enum insn_code icode;
3631  insn_operand_predicate_fn pred;
3632
3633  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3634  /* If there is a push pattern, use it.  Otherwise fall back to the old way
3635     of handing a MEM that represents the push operation to the move expander.  */
3636  icode = push_optab->handlers[(int) mode].insn_code;
3637  if (icode != CODE_FOR_nothing)
3638    {
3639      if (((pred = insn_data[(int) icode].operand[0].predicate)
3640	   && !((*pred) (x, mode))))
3641	x = force_reg (mode, x);
3642      emit_insn (GEN_FCN (icode) (x));
3643      return;
3644    }
3645  if (GET_MODE_SIZE (mode) == rounded_size)
3646    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3647  else
3648    {
3649#ifdef STACK_GROWS_DOWNWARD
3650      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3651				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3652#else
3653      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3654				GEN_INT (rounded_size));
3655#endif
3656      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3657    }
3658
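
  /* Illustration (assuming a downward-growing stack): if PUSH_ROUNDING
     rounds a 2-byte HImode push up to 4 bytes, the mode size differs from
     ROUNDED_SIZE, so the address built above is a PRE_MODIFY stepping the
     stack pointer by -4 rather than a simple PRE_DEC.  */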
3659  dest = gen_rtx_MEM (mode, dest_addr);
3660
3661  if (type != 0)
3662    {
3663      set_mem_attributes (dest, type, 1);
3664
3665      if (flag_optimize_sibling_calls)
3666	/* Function incoming arguments may overlap with sibling call
3667	   outgoing arguments and we cannot allow reordering of reads
3668	   from function arguments with stores to outgoing arguments
3669	   of sibling calls.  */
3670	set_mem_alias_set (dest, 0);
3671    }
3672  emit_move_insn (dest, x);
3673}
3674#endif
3675
3676/* Generate code to push X onto the stack, assuming it has mode MODE and
3677   type TYPE.
3678   MODE is redundant except when X is a CONST_INT (since they don't
3679   carry mode info).
3680   SIZE is an rtx for the size of data to be copied (in bytes),
3681   needed only if X is BLKmode.
3682
3683   ALIGN (in bits) is maximum alignment we can assume.
3684
3685   If PARTIAL and REG are both nonzero, then copy that many of the first
3686   words of X into registers starting with REG, and push the rest of X.
3687   The amount of space pushed is decreased by PARTIAL words,
3688   rounded *down* to a multiple of PARM_BOUNDARY.
3689   REG must be a hard register in this case.
3690   If REG is zero but PARTIAL is not, take any all others actions for an
3691   If REG is zero but PARTIAL is not, take all other actions for an
3692   registers.
3693
3694   EXTRA is the amount in bytes of extra space to leave next to this arg.
3695   This is ignored if an argument block has already been allocated.
3696
3697   On a machine that lacks real push insns, ARGS_ADDR is the address of
3698   the bottom of the argument block for this call.  We use indexing off there
3699   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
3700   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3701
3702   ARGS_SO_FAR is the size of args previously pushed for this call.
3703
3704   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3705   for arguments passed in registers.  If nonzero, it will be the number
3706   of bytes required.  */
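
/* A worked example of the PARTIAL/REG convention above (the numbers are
   illustrative only): with UNITS_PER_WORD == 4, PARTIAL == 2 and REG a hard
   register, the first 8 bytes of X are loaded into registers starting at
   REG (by the copy at the end of this function) and only the remainder of
   X is stored on the stack, the pushed size being reduced accordingly.  */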
3707
3708void
3709emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3710		args_addr, args_so_far, reg_parm_stack_space,
3711		alignment_pad)
3712     rtx x;
3713     enum machine_mode mode;
3714     tree type;
3715     rtx size;
3716     unsigned int align;
3717     int partial;
3718     rtx reg;
3719     int extra;
3720     rtx args_addr;
3721     rtx args_so_far;
3722     int reg_parm_stack_space;
3723     rtx alignment_pad;
3724{
3725  rtx xinner;
3726  enum direction stack_direction
3727#ifdef STACK_GROWS_DOWNWARD
3728    = downward;
3729#else
3730    = upward;
3731#endif
3732
3733  /* Decide where to pad the argument: `downward' for below,
3734     `upward' for above, or `none' for don't pad it.
3735     Default is below for small data on big-endian machines; else above.  */
3736  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3737
3738  /* Invert direction if stack is post-decrement.
3739     FIXME: why?  */
3740  if (STACK_PUSH_CODE == POST_DEC)
3741    if (where_pad != none)
3742      where_pad = (where_pad == downward ? upward : downward);
3743
3744  xinner = x = protect_from_queue (x, 0);
3745
3746  if (mode == BLKmode)
3747    {
3748      /* Copy a block into the stack, entirely or partially.  */
3749
3750      rtx temp;
3751      int used = partial * UNITS_PER_WORD;
3752      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3753      int skip;
3754
3755      if (size == 0)
3756	abort ();
3757
3758      used -= offset;
3759
3760      /* USED is now the # of bytes we need not copy to the stack
3761	 because registers will take care of them.  */
3762
3763      if (partial != 0)
3764	xinner = adjust_address (xinner, BLKmode, used);
3765
3766      /* If the partial register-part of the arg counts in its stack size,
3767	 skip the part of stack space corresponding to the registers.
3768	 Otherwise, start copying to the beginning of the stack space,
3769	 by setting SKIP to 0.  */
3770      skip = (reg_parm_stack_space == 0) ? 0 : used;
3771
3772#ifdef PUSH_ROUNDING
3773      /* Do it with several push insns if that doesn't take lots of insns
3774	 and if there is no difficulty with push insns that skip bytes
3775	 on the stack for alignment purposes.  */
3776      if (args_addr == 0
3777	  && PUSH_ARGS
3778	  && GET_CODE (size) == CONST_INT
3779	  && skip == 0
3780	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3781	  /* Here we avoid the case of a structure whose weak alignment
3782	     forces many pushes of a small amount of data,
3783	     and such small pushes do rounding that causes trouble.  */
3784	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3785	      || align >= BIGGEST_ALIGNMENT
3786	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3787		  == (align / BITS_PER_UNIT)))
3788	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3789	{
3790	  /* Push padding now if padding above and stack grows down,
3791	     or if padding below and stack grows up.
3792	     But if space already allocated, this has already been done.  */
3793	  if (extra && args_addr == 0
3794	      && where_pad != none && where_pad != stack_direction)
3795	    anti_adjust_stack (GEN_INT (extra));
3796
3797	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3798	}
3799      else
3800#endif /* PUSH_ROUNDING  */
3801	{
3802	  rtx target;
3803
3804	  /* Otherwise make space on the stack and copy the data
3805	     to the address of that space.  */
3806
3807	  /* Deduct words put into registers from the size we must copy.  */
3808	  if (partial != 0)
3809	    {
3810	      if (GET_CODE (size) == CONST_INT)
3811		size = GEN_INT (INTVAL (size) - used);
3812	      else
3813		size = expand_binop (GET_MODE (size), sub_optab, size,
3814				     GEN_INT (used), NULL_RTX, 0,
3815				     OPTAB_LIB_WIDEN);
3816	    }
3817
3818	  /* Get the address of the stack space.
3819	     In this case, we do not deal with EXTRA separately.
3820	     A single stack adjust will do.  */
3821	  if (! args_addr)
3822	    {
3823	      temp = push_block (size, extra, where_pad == downward);
3824	      extra = 0;
3825	    }
3826	  else if (GET_CODE (args_so_far) == CONST_INT)
3827	    temp = memory_address (BLKmode,
3828				   plus_constant (args_addr,
3829						  skip + INTVAL (args_so_far)));
3830	  else
3831	    temp = memory_address (BLKmode,
3832				   plus_constant (gen_rtx_PLUS (Pmode,
3833								args_addr,
3834								args_so_far),
3835						  skip));
3836
3837	  if (!ACCUMULATE_OUTGOING_ARGS)
3838	    {
3839	      /* If the source is referenced relative to the stack pointer,
3840		 copy it to another register to stabilize it.  We do not need
3841		 to do this if we know that we won't be changing sp.  */
3842
3843	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3844		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3845		temp = copy_to_reg (temp);
3846	    }
3847
3848	  target = gen_rtx_MEM (BLKmode, temp);
3849
3850	  if (type != 0)
3851	    {
3852	      set_mem_attributes (target, type, 1);
3853	      /* Function incoming arguments may overlap with sibling call
3854		 outgoing arguments and we cannot allow reordering of reads
3855		 from function arguments with stores to outgoing arguments
3856		 of sibling calls.  */
3857	      set_mem_alias_set (target, 0);
3858	    }
3859
3860	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3861	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3862	  set_mem_align (target, align);
3863
3864	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3865	}
3866    }
3867  else if (partial > 0)
3868    {
3869      /* Scalar partly in registers.  */
3870
3871      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3872      int i;
3873      int not_stack;
3874      /* # words of start of argument
3875	 that we must make space for but need not store.  */
3876      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3877      int args_offset = INTVAL (args_so_far);
3878      int skip;
3879
3880      /* Push padding now if padding above and stack grows down,
3881	 or if padding below and stack grows up.
3882	 But if space already allocated, this has already been done.  */
3883      if (extra && args_addr == 0
3884	  && where_pad != none && where_pad != stack_direction)
3885	anti_adjust_stack (GEN_INT (extra));
3886
3887      /* If we make space by pushing it, we might as well push
3888	 the real data.  Otherwise, we can leave OFFSET nonzero
3889	 and leave the space uninitialized.  */
3890      if (args_addr == 0)
3891	offset = 0;
3892
3893      /* Now NOT_STACK gets the number of words that we don't need to
3894	 allocate on the stack.  */
3895      not_stack = partial - offset;
3896
3897      /* If the partial register-part of the arg counts in its stack size,
3898	 skip the part of stack space corresponding to the registers.
3899	 Otherwise, start copying to the beginning of the stack space,
3900	 by setting SKIP to 0.  */
3901      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3902
3903      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3904	x = validize_mem (force_const_mem (mode, x));
3905
3906      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3907	 SUBREGs of such registers are not allowed.  */
3908      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3909	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3910	x = copy_to_reg (x);
3911
3912      /* Loop over all the words allocated on the stack for this arg.  */
3913      /* We can do it by words, because any scalar bigger than a word
3914	 has a size that is a multiple of a word.  */
3915#ifndef PUSH_ARGS_REVERSED
3916      for (i = not_stack; i < size; i++)
3917#else
3918      for (i = size - 1; i >= not_stack; i--)
3919#endif
3920	if (i >= not_stack + offset)
3921	  emit_push_insn (operand_subword_force (x, i, mode),
3922			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3923			  0, args_addr,
3924			  GEN_INT (args_offset + ((i - not_stack + skip)
3925						  * UNITS_PER_WORD)),
3926			  reg_parm_stack_space, alignment_pad);
3927    }
3928  else
3929    {
3930      rtx addr;
3931      rtx target = NULL_RTX;
3932      rtx dest;
3933
3934      /* Push padding now if padding above and stack grows down,
3935	 or if padding below and stack grows up.
3936	 But if space already allocated, this has already been done.  */
3937      if (extra && args_addr == 0
3938	  && where_pad != none && where_pad != stack_direction)
3939	anti_adjust_stack (GEN_INT (extra));
3940
3941#ifdef PUSH_ROUNDING
3942      if (args_addr == 0 && PUSH_ARGS)
3943	emit_single_push_insn (mode, x, type);
3944      else
3945#endif
3946	{
3947	  if (GET_CODE (args_so_far) == CONST_INT)
3948	    addr
3949	      = memory_address (mode,
3950				plus_constant (args_addr,
3951					       INTVAL (args_so_far)));
3952	  else
3953	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3954						       args_so_far));
3955	  target = addr;
3956	  dest = gen_rtx_MEM (mode, addr);
3957	  if (type != 0)
3958	    {
3959	      set_mem_attributes (dest, type, 1);
3960	      /* Function incoming arguments may overlap with sibling call
3961		 outgoing arguments and we cannot allow reordering of reads
3962		 from function arguments with stores to outgoing arguments
3963		 of sibling calls.  */
3964	      set_mem_alias_set (dest, 0);
3965	    }
3966
3967	  emit_move_insn (dest, x);
3968	}
3969    }
3970
3971  /* If part should go in registers, copy that part
3972     into the appropriate registers.  Do this now, at the end,
3973     since mem-to-mem copies above may do function calls.  */
3974  if (partial > 0 && reg != 0)
3975    {
3976      /* Handle calls that pass values in multiple non-contiguous locations.
3977	 The Irix 6 ABI has examples of this.  */
3978      if (GET_CODE (reg) == PARALLEL)
3979	emit_group_load (reg, x, -1);  /* ??? size? */
3980      else
3981	move_block_to_reg (REGNO (reg), x, partial, mode);
3982    }
3983
3984  if (extra && args_addr == 0 && where_pad == stack_direction)
3985    anti_adjust_stack (GEN_INT (extra));
3986
3987  if (alignment_pad && args_addr == 0)
3988    anti_adjust_stack (alignment_pad);
3989}
3990
3991/* Return X if X can be used as a subtarget in a sequence of arithmetic
3992   operations.  */
3993
3994static rtx
3995get_subtarget (x)
3996     rtx x;
3997{
3998  return ((x == 0
3999	   /* Only registers can be subtargets.  */
4000	   || GET_CODE (x) != REG
4001	   /* If the register is readonly, it can't be set more than once.  */
4002	   || RTX_UNCHANGING_P (x)
4003	   /* Don't use hard regs to avoid extending their life.  */
4004	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4005	   /* Avoid subtargets inside loops,
4006	      since they hide some invariant expressions.  */
4007	   || preserve_subexpressions_p ())
4008	  ? 0 : x);
4009}
4010
4011/* Expand an assignment that stores the value of FROM into TO.
4012   If WANT_VALUE is nonzero, return an rtx for the value of TO.
4013   (This may contain a QUEUED rtx;
4014   if the value is constant, this rtx is a constant.)
4015   Otherwise, the returned value is NULL_RTX.
4016
4017   SUGGEST_REG is no longer actually used.
4018   It used to mean: copy the value through a register
4019   and return that register, if that is possible.
4020   We now use WANT_VALUE to decide whether to do this.  */
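
/* An informal example of the cases below: for a bit-field store such as
   s.f = x, the COMPONENT_REF branch uses get_inner_reference to find the
   containing object, the bit position and the bit size, and then stores
   through store_field; a simple scalar assignment falls through to the
   store_expr call at the end.  */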
4021
4022rtx
4023expand_assignment (to, from, want_value, suggest_reg)
4024     tree to, from;
4025     int want_value;
4026     int suggest_reg ATTRIBUTE_UNUSED;
4027{
4028  rtx to_rtx = 0;
4029  rtx result;
4030
4031  /* Don't crash if the lhs of the assignment was erroneous.  */
4032
4033  if (TREE_CODE (to) == ERROR_MARK)
4034    {
4035      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4036      return want_value ? result : NULL_RTX;
4037    }
4038
4039  /* Assignment of a structure component needs special treatment
4040     if the structure component's rtx is not simply a MEM.
4041     Assignment of an array element at a constant index, and assignment of
4042     an array element in an unaligned packed structure field, have the same
4043     problem.  */
4044
4045  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4046      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4047      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4048    {
4049      enum machine_mode mode1;
4050      HOST_WIDE_INT bitsize, bitpos;
4051      rtx orig_to_rtx;
4052      tree offset;
4053      int unsignedp;
4054      int volatilep = 0;
4055      tree tem;
4056
4057      push_temp_slots ();
4058      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4059				 &unsignedp, &volatilep);
4060
4061      /* If we are going to use store_bit_field and extract_bit_field,
4062	 make sure to_rtx will be safe for multiple use.  */
4063
4064      if (mode1 == VOIDmode && want_value)
4065	tem = stabilize_reference (tem);
4066
4067      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4068
4069      if (offset != 0)
4070	{
4071	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4072
4073	  if (GET_CODE (to_rtx) != MEM)
4074	    abort ();
4075
4076#ifdef POINTERS_EXTEND_UNSIGNED
4077	  if (GET_MODE (offset_rtx) != Pmode)
4078	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4079#else
4080	  if (GET_MODE (offset_rtx) != ptr_mode)
4081	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4082#endif
4083
4084	  /* A constant address in TO_RTX can have VOIDmode; we must not try
4085	     to call force_reg in that case, so avoid it.  */
4086	  if (GET_CODE (to_rtx) == MEM
4087	      && GET_MODE (to_rtx) == BLKmode
4088	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4089	      && bitsize > 0
4090	      && (bitpos % bitsize) == 0
4091	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4092	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4093	    {
4094	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4095	      bitpos = 0;
4096	    }
4097
4098	  to_rtx = offset_address (to_rtx, offset_rtx,
4099				   highest_pow2_factor_for_type (TREE_TYPE (to),
4100								 offset));
4101	}
4102
4103      if (GET_CODE (to_rtx) == MEM)
4104	{
4105	  /* If the field is at offset zero, we could have been given the
4106	     DECL_RTX of the parent struct.  Don't munge it.  */
4107	  to_rtx = shallow_copy_rtx (to_rtx);
4108
4109	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4110	}
4111
4112      /* Deal with volatile and readonly fields.  The former is only done
4113	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4114      if (volatilep && GET_CODE (to_rtx) == MEM)
4115	{
4116	  if (to_rtx == orig_to_rtx)
4117	    to_rtx = copy_rtx (to_rtx);
4118	  MEM_VOLATILE_P (to_rtx) = 1;
4119	}
4120
4121      if (TREE_CODE (to) == COMPONENT_REF
4122	  && TREE_READONLY (TREE_OPERAND (to, 1)))
4123	{
4124	  if (to_rtx == orig_to_rtx)
4125	    to_rtx = copy_rtx (to_rtx);
4126	  RTX_UNCHANGING_P (to_rtx) = 1;
4127	}
4128
4129      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4130	{
4131	  if (to_rtx == orig_to_rtx)
4132	    to_rtx = copy_rtx (to_rtx);
4133	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4134	}
4135
4136      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4137			    (want_value
4138			     /* Spurious cast for HPUX compiler.  */
4139			     ? ((enum machine_mode)
4140				TYPE_MODE (TREE_TYPE (to)))
4141			     : VOIDmode),
4142			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
4143
4144      preserve_temp_slots (result);
4145      free_temp_slots ();
4146      pop_temp_slots ();
4147
4148      /* If the value is meaningful, convert RESULT to the proper mode.
4149	 Otherwise, return nothing.  */
4150      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4151					  TYPE_MODE (TREE_TYPE (from)),
4152					  result,
4153					  TREE_UNSIGNED (TREE_TYPE (to)))
4154	      : NULL_RTX);
4155    }
4156
4157  /* If the rhs is a function call and its value is not an aggregate,
4158     call the function before we start to compute the lhs.
4159     This is needed for correct code for cases such as
4160     val = setjmp (buf) on machines where reference to val
4161     requires loading up part of an address in a separate insn.
4162
4163     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4164     since it might be a promoted variable where the zero- or sign- extension
4165     needs to be done.  Handling this in the normal way is safe because no
4166     computation is done before the call.  */
4167  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4168      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4169      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4170	    && GET_CODE (DECL_RTL (to)) == REG))
4171    {
4172      rtx value;
4173
4174      push_temp_slots ();
4175      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4176      if (to_rtx == 0)
4177	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4178
4179      /* Handle calls that return values in multiple non-contiguous locations.
4180	 The Irix 6 ABI has examples of this.  */
4181      if (GET_CODE (to_rtx) == PARALLEL)
4182	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4183      else if (GET_MODE (to_rtx) == BLKmode)
4184	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4185      else
4186	{
4187#ifdef POINTERS_EXTEND_UNSIGNED
4188	  if (POINTER_TYPE_P (TREE_TYPE (to))
4189	      && GET_MODE (to_rtx) != GET_MODE (value))
4190	    value = convert_memory_address (GET_MODE (to_rtx), value);
4191#endif
4192	  emit_move_insn (to_rtx, value);
4193	}
4194      preserve_temp_slots (to_rtx);
4195      free_temp_slots ();
4196      pop_temp_slots ();
4197      return want_value ? to_rtx : NULL_RTX;
4198    }
4199
4200  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4201     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4202
4203  if (to_rtx == 0)
4204    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4205
4206  /* Don't move directly into a return register.  */
4207  if (TREE_CODE (to) == RESULT_DECL
4208      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4209    {
4210      rtx temp;
4211
4212      push_temp_slots ();
4213      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4214
4215      if (GET_CODE (to_rtx) == PARALLEL)
4216	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4217      else
4218	emit_move_insn (to_rtx, temp);
4219
4220      preserve_temp_slots (to_rtx);
4221      free_temp_slots ();
4222      pop_temp_slots ();
4223      return want_value ? to_rtx : NULL_RTX;
4224    }
4225
4226  /* In case we are returning the contents of an object which overlaps
4227     the place the value is being stored, use a safe function when copying
4228     a value through a pointer into a structure value return block.  */
4229  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4230      && current_function_returns_struct
4231      && !current_function_returns_pcc_struct)
4232    {
4233      rtx from_rtx, size;
4234
4235      push_temp_slots ();
4236      size = expr_size (from);
4237      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4238
4239      if (TARGET_MEM_FUNCTIONS)
4240	emit_library_call (memmove_libfunc, LCT_NORMAL,
4241			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4242			   XEXP (from_rtx, 0), Pmode,
4243			   convert_to_mode (TYPE_MODE (sizetype),
4244					    size, TREE_UNSIGNED (sizetype)),
4245			   TYPE_MODE (sizetype));
4246      else
4247        emit_library_call (bcopy_libfunc, LCT_NORMAL,
4248			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4249			   XEXP (to_rtx, 0), Pmode,
4250			   convert_to_mode (TYPE_MODE (integer_type_node),
4251					    size,
4252					    TREE_UNSIGNED (integer_type_node)),
4253			   TYPE_MODE (integer_type_node));
4254
4255      preserve_temp_slots (to_rtx);
4256      free_temp_slots ();
4257      pop_temp_slots ();
4258      return want_value ? to_rtx : NULL_RTX;
4259    }
4260
4261  /* Compute FROM and store the value in the rtx we got.  */
4262
4263  push_temp_slots ();
4264  result = store_expr (from, to_rtx, want_value);
4265  preserve_temp_slots (result);
4266  free_temp_slots ();
4267  pop_temp_slots ();
4268  return want_value ? result : NULL_RTX;
4269}
4270
4271/* Generate code for computing expression EXP,
4272   and storing the value into TARGET.
4273   TARGET may contain a QUEUED rtx.
4274
4275   If WANT_VALUE & 1 is nonzero, return a copy of the value
4276   not in TARGET, so that we can be sure to use the proper
4277   value in a containing expression even if TARGET has something
4278   else stored in it.  If possible, we copy the value through a pseudo
4279   and return that pseudo.  Or, if the value is constant, we try to
4280   return the constant.  In some cases, we return a pseudo
4281   copied *from* TARGET.
4282
4283   If the mode is BLKmode then we may return TARGET itself.
4284   It turns out that in BLKmode it doesn't cause a problem,
4285   because C has no operators that could combine two different
4286   assignments into the same BLKmode object with different values
4287   with no sequence point.  Will other languages need this to
4288   be more thorough?
4289
4290   If WANT_VALUE & 1 is 0, we return NULL, to make sure
4291   to catch quickly any cases where the caller uses the value
4292   and fails to set WANT_VALUE.
4293
4294   If WANT_VALUE & 2 is set, this is a store into a call param on the
4295   stack, and block moves may need to be treated specially.  */
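
/* Informally: a caller that needs the stored value itself -- for example
   when the assignment is nested inside a larger expression -- passes
   WANT_VALUE & 1 and uses the rtx returned here; a plain statement-level
   store passes 0 and gets NULL_RTX back.  */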
4296
4297rtx
4298store_expr (exp, target, want_value)
4299     tree exp;
4300     rtx target;
4301     int want_value;
4302{
4303  rtx temp;
4304  int dont_return_target = 0;
4305  int dont_store_target = 0;
4306
4307  if (VOID_TYPE_P (TREE_TYPE (exp)))
4308    {
4309      /* C++ can generate ?: expressions with a throw expression in one
4310	 branch and an rvalue in the other. Here, we resolve attempts to
4311	 store the throw expression's nonexistent result.  */
4312      if (want_value)
4313	abort ();
4314      expand_expr (exp, const0_rtx, VOIDmode, 0);
4315      return NULL_RTX;
4316    }
4317  if (TREE_CODE (exp) == COMPOUND_EXPR)
4318    {
4319      /* Perform first part of compound expression, then assign from second
4320	 part.  */
4321      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4322		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4323      emit_queue ();
4324      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4325    }
4326  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4327    {
4328      /* For a conditional expression, get a safe form of the target.  Then
4329	 test the condition, doing the appropriate assignment on either
4330	 side.  This avoids the creation of unnecessary temporaries.
4331	 For non-BLKmode, it is more efficient not to do this.  */
4332
4333      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4334
4335      emit_queue ();
4336      target = protect_from_queue (target, 1);
4337
4338      do_pending_stack_adjust ();
4339      NO_DEFER_POP;
4340      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4341      start_cleanup_deferral ();
4342      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4343      end_cleanup_deferral ();
4344      emit_queue ();
4345      emit_jump_insn (gen_jump (lab2));
4346      emit_barrier ();
4347      emit_label (lab1);
4348      start_cleanup_deferral ();
4349      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4350      end_cleanup_deferral ();
4351      emit_queue ();
4352      emit_label (lab2);
4353      OK_DEFER_POP;
4354
4355      return want_value & 1 ? target : NULL_RTX;
4356    }
4357  else if (queued_subexp_p (target))
4358    /* If target contains a postincrement, let's not risk
4359       using it as the place to generate the rhs.  */
4360    {
4361      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4362	{
4363	  /* Expand EXP into a new pseudo.  */
4364	  temp = gen_reg_rtx (GET_MODE (target));
4365	  temp = expand_expr (exp, temp, GET_MODE (target),
4366			      (want_value & 2
4367			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4368	}
4369      else
4370	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4371			    (want_value & 2
4372			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4373
4374      /* If target is volatile, ANSI requires accessing the value
4375	 *from* the target, if it is accessed.  So make that happen.
4376	 In no case return the target itself.  */
4377      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4378	dont_return_target = 1;
4379    }
4380  else if ((want_value & 1) != 0
4381	   && GET_CODE (target) == MEM
4382	   && ! MEM_VOLATILE_P (target)
4383	   && GET_MODE (target) != BLKmode)
4384    /* If target is in memory and caller wants value in a register instead,
4385       arrange that.  Pass TARGET as target for expand_expr so that,
4386       if EXP is another assignment, WANT_VALUE will be nonzero for it.
4387       We know expand_expr will not use the target in that case.
4388       Don't do this if TARGET is volatile because we are supposed
4389       to write it and then read it.  */
4390    {
4391      temp = expand_expr (exp, target, GET_MODE (target),
4392			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4393      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4394	{
4395	  /* If TEMP is already in the desired TARGET, only copy it from
4396	     memory and don't store it there again.  */
4397	  if (temp == target
4398	      || (rtx_equal_p (temp, target)
4399		  && ! side_effects_p (temp) && ! side_effects_p (target)))
4400	    dont_store_target = 1;
4401	  temp = copy_to_reg (temp);
4402	}
4403      dont_return_target = 1;
4404    }
4405  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4406    /* If this is a scalar in a register that is stored in a wider mode
4407       than the declared mode, compute the result into its declared mode
4408       and then convert to the wider mode.  Our value is the computed
4409       expression.  */
4410    {
4411      rtx inner_target = 0;
4412
4413      /* If we don't want a value, we can do the conversion inside EXP,
4414	 which will often result in some optimizations.  Do the conversion
4415	 in two steps: first change the signedness, if needed, then
4416	 the extend.  But don't do this if the type of EXP is a subtype
4417	 of something else since then the conversion might involve
4418	 more than just converting modes.  */
4419      if ((want_value & 1) == 0
4420	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4421	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4422	{
4423	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4424	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4425	    exp = convert
4426	      ((*lang_hooks.types.signed_or_unsigned_type)
4427	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4428
4429	  exp = convert ((*lang_hooks.types.type_for_mode)
4430			 (GET_MODE (SUBREG_REG (target)),
4431			  SUBREG_PROMOTED_UNSIGNED_P (target)),
4432			 exp);
4433
4434	  inner_target = SUBREG_REG (target);
4435	}
4436
4437      temp = expand_expr (exp, inner_target, VOIDmode,
4438			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4439
4440      /* If TEMP is a volatile MEM and we want a result value, make
4441	 the access now so it gets done only once.  Likewise if
4442	 it contains TARGET.  */
4443      if (GET_CODE (temp) == MEM && (want_value & 1) != 0
4444	  && (MEM_VOLATILE_P (temp)
4445	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4446	temp = copy_to_reg (temp);
4447
4448      /* If TEMP is a VOIDmode constant, use convert_modes to make
4449	 sure that we properly convert it.  */
4450      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4451	{
4452	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4453				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4454	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4455			        GET_MODE (target), temp,
4456			        SUBREG_PROMOTED_UNSIGNED_P (target));
4457	}
4458
4459      convert_move (SUBREG_REG (target), temp,
4460		    SUBREG_PROMOTED_UNSIGNED_P (target));
4461
4462      /* If we promoted a constant, change the mode back down to match
4463	 target.  Otherwise, the caller might get confused by a result whose
4464	 mode is larger than expected.  */
4465
4466      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4467	{
4468	  if (GET_MODE (temp) != VOIDmode)
4469	    {
4470	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4471	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4472	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
4473		SUBREG_PROMOTED_UNSIGNED_P (target));
4474	    }
4475	  else
4476	    temp = convert_modes (GET_MODE (target),
4477				  GET_MODE (SUBREG_REG (target)),
4478				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4479	}
4480
4481      return want_value & 1 ? temp : NULL_RTX;
4482    }
4483  else
4484    {
4485      temp = expand_expr (exp, target, GET_MODE (target),
4486			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4487      /* Return TARGET if it's a specified hardware register.
4488	 If TARGET is a volatile mem ref, either return TARGET
4489	 or return a reg copied *from* TARGET; ANSI requires this.
4490
4491	 Otherwise, if TEMP is not TARGET, return TEMP
4492	 if it is constant (for efficiency),
4493	 or if we really want the correct value.  */
4494      if (!(target && GET_CODE (target) == REG
4495	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4496	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4497	  && ! rtx_equal_p (temp, target)
4498	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
4499	dont_return_target = 1;
4500    }
4501
4502  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4503     the same as that of TARGET, adjust the constant.  This is needed, for
4504     example, in case it is a CONST_DOUBLE and we want only a word-sized
4505     value.  */
4506  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4507      && TREE_CODE (exp) != ERROR_MARK
4508      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4509    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4510			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4511
4512  /* If value was not generated in the target, store it there.
4513     Convert the value to TARGET's type first if necessary.
4514     If TEMP and TARGET compare equal according to rtx_equal_p, but
4515     one or both of them are volatile memory refs, we have to distinguish
4516     two cases:
4517     - expand_expr has used TARGET.  In this case, we must not generate
4518       another copy.  This can be detected by TARGET being equal according
4519       to == .
4520     - expand_expr has not used TARGET - that means that the source just
4521       happens to have the same RTX form.  Since temp will have been created
4522       by expand_expr, it will compare unequal according to == .
4523       We must generate a copy in this case, to reach the correct number
4524       of volatile memory references.  */
4525
4526  if ((! rtx_equal_p (temp, target)
4527       || (temp != target && (side_effects_p (temp)
4528			      || side_effects_p (target))))
4529      && TREE_CODE (exp) != ERROR_MARK
4530      && ! dont_store_target
4531	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4532	    but TARGET is not a valid memory reference, TEMP will differ
4533	    from TARGET although it is really the same location.  */
4534      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4535	  || target != DECL_RTL_IF_SET (exp))
4536      /* If there's nothing to copy, don't bother.  Don't call expr_size
4537	 unless necessary, because some front ends' (C++) expr_size hook
4538	 aborts on objects that are not supposed to be bit-copied or
4539	 bit-initialized.  */
4540      && expr_size (exp) != const0_rtx)
4541    {
4542      target = protect_from_queue (target, 1);
4543      if (GET_MODE (temp) != GET_MODE (target)
4544	  && GET_MODE (temp) != VOIDmode)
4545	{
4546	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4547	  if (dont_return_target)
4548	    {
4549	      /* In this case, we will return TEMP,
4550		 so make sure it has the proper mode.
4551		 But don't forget to store the value into TARGET.  */
4552	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4553	      emit_move_insn (target, temp);
4554	    }
4555	  else
4556	    convert_move (target, temp, unsignedp);
4557	}
4558
4559      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4560	{
4561	  /* Handle copying a string constant into an array.  The string
4562	     constant may be shorter than the array.  So copy just the string's
4563	     actual length, and clear the rest.  First get the size of the data
4564	     type of the string, which is actually the size of the target.  */
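	  /* For instance (illustrative only), for char buf[8] = "hi"; only
	     the bytes recorded in the STRING_CST are copied here and the
	     remaining bytes of the array are zeroed by the clear_storage
	     call below.  */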
4565	  rtx size = expr_size (exp);
4566
4567	  if (GET_CODE (size) == CONST_INT
4568	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4569	    emit_block_move (target, temp, size,
4570			     (want_value & 2
4571			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4572	  else
4573	    {
4574	      /* Compute the size of the data to copy from the string.  */
4575	      tree copy_size
4576		= size_binop (MIN_EXPR,
4577			      make_tree (sizetype, size),
4578			      size_int (TREE_STRING_LENGTH (exp)));
4579	      rtx copy_size_rtx
4580		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4581			       (want_value & 2
4582				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4583	      rtx label = 0;
4584
4585	      /* Copy that much.  */
4586	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4587					       TREE_UNSIGNED (sizetype));
4588	      emit_block_move (target, temp, copy_size_rtx,
4589			       (want_value & 2
4590				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4591
4592	      /* Figure out how much is left in TARGET that we have to clear.
4593		 Do all calculations in ptr_mode.  */
4594	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4595		{
4596		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4597		  target = adjust_address (target, BLKmode,
4598					   INTVAL (copy_size_rtx));
4599		}
4600	      else
4601		{
4602		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4603				       copy_size_rtx, NULL_RTX, 0,
4604				       OPTAB_LIB_WIDEN);
4605
4606#ifdef POINTERS_EXTEND_UNSIGNED
4607		  if (GET_MODE (copy_size_rtx) != Pmode)
4608		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4609						     TREE_UNSIGNED (sizetype));
4610#endif
4611
4612		  target = offset_address (target, copy_size_rtx,
4613					   highest_pow2_factor (copy_size));
4614		  label = gen_label_rtx ();
4615		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4616					   GET_MODE (size), 0, label);
4617		}
4618
4619	      if (size != const0_rtx)
4620		clear_storage (target, size);
4621
4622	      if (label)
4623		emit_label (label);
4624	    }
4625	}
4626      /* Handle calls that return values in multiple non-contiguous locations.
4627	 The Irix 6 ABI has examples of this.  */
4628      else if (GET_CODE (target) == PARALLEL)
4629	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4630      else if (GET_MODE (temp) == BLKmode)
4631	emit_block_move (target, temp, expr_size (exp),
4632			 (want_value & 2
4633			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4634      else
4635	emit_move_insn (target, temp);
4636    }
4637
4638  /* If we don't want a value, return NULL_RTX.  */
4639  if ((want_value & 1) == 0)
4640    return NULL_RTX;
4641
4642  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4643     ??? The latter test doesn't seem to make sense.  */
4644  else if (dont_return_target && GET_CODE (temp) != MEM)
4645    return temp;
4646
4647  /* Return TARGET itself if it is a hard register.  */
4648  else if ((want_value & 1) != 0
4649	   && GET_MODE (target) != BLKmode
4650	   && ! (GET_CODE (target) == REG
4651		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4652    return copy_to_reg (target);
4653
4654  else
4655    return target;
4656}
4657
4658/* Return 1 if EXP just contains zeros.  */
4659
4660static int
4661is_zeros_p (exp)
4662     tree exp;
4663{
4664  tree elt;
4665
4666  switch (TREE_CODE (exp))
4667    {
4668    case CONVERT_EXPR:
4669    case NOP_EXPR:
4670    case NON_LVALUE_EXPR:
4671    case VIEW_CONVERT_EXPR:
4672      return is_zeros_p (TREE_OPERAND (exp, 0));
4673
4674    case INTEGER_CST:
4675      return integer_zerop (exp);
4676
4677    case COMPLEX_CST:
4678      return
4679	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4680
4681    case REAL_CST:
4682      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4683
4684    case VECTOR_CST:
4685      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4686	   elt = TREE_CHAIN (elt))
4687	if (!is_zeros_p (TREE_VALUE (elt)))
4688	  return 0;
4689
4690      return 1;
4691
4692    case CONSTRUCTOR:
4693      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4694	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4695      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4696	if (! is_zeros_p (TREE_VALUE (elt)))
4697	  return 0;
4698
4699      return 1;
4700
4701    default:
4702      return 0;
4703    }
4704}
4705
4706/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
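/* For example, a nested CONSTRUCTOR with five zero elements out of six
   counts as mostly zero, since 4 * 5 >= 3 * 6.  */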
4707
4708static int
4709mostly_zeros_p (exp)
4710     tree exp;
4711{
4712  if (TREE_CODE (exp) == CONSTRUCTOR)
4713    {
4714      int elts = 0, zeros = 0;
4715      tree elt = CONSTRUCTOR_ELTS (exp);
4716      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4717	{
4718	  /* If there are no ranges of true bits, it is all zero.  */
4719	  return elt == NULL_TREE;
4720	}
4721      for (; elt; elt = TREE_CHAIN (elt))
4722	{
4723	  /* We do not handle the case where the index is a RANGE_EXPR,
4724	     so the statistic will be somewhat inaccurate.
4725	     We do make a more accurate count in store_constructor itself,
4726	     and since this function is only used for nested array elements,
4727	     this should be close enough.  */
4728	  if (mostly_zeros_p (TREE_VALUE (elt)))
4729	    zeros++;
4730	  elts++;
4731	}
4732
4733      return 4 * zeros >= 3 * elts;
4734    }
4735
4736  return is_zeros_p (exp);
4737}
4738
4739/* Helper function for store_constructor.
4740   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4741   TYPE is the type of the CONSTRUCTOR, not the element type.
4742   CLEARED is as for store_constructor.
4743   ALIAS_SET is the alias set to use for any stores.
4744
4745   This provides a recursive shortcut back to store_constructor when it isn't
4746   necessary to go through store_field.  This is so that we can pass through
4747   the cleared field to let store_constructor know that we may not have to
4748   clear a substructure if the outer structure has already been cleared.  */
4749
4750static void
4751store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4752			 alias_set)
4753     rtx target;
4754     unsigned HOST_WIDE_INT bitsize;
4755     HOST_WIDE_INT bitpos;
4756     enum machine_mode mode;
4757     tree exp, type;
4758     int cleared;
4759     int alias_set;
4760{
4761  if (TREE_CODE (exp) == CONSTRUCTOR
4762      && bitpos % BITS_PER_UNIT == 0
4763      /* If we have a nonzero bitpos for a register target, then we just
4764	 let store_field do the bitfield handling.  This is unlikely to
4765	 generate unnecessary clear instructions anyway.  */
4766      && (bitpos == 0 || GET_CODE (target) == MEM))
4767    {
4768      if (GET_CODE (target) == MEM)
4769	target
4770	  = adjust_address (target,
4771			    GET_MODE (target) == BLKmode
4772			    || 0 != (bitpos
4773				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4774			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4775
4776
4777      /* Update the alias set, if required.  */
4778      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4779	  && MEM_ALIAS_SET (target) != 0)
4780	{
4781	  target = copy_rtx (target);
4782	  set_mem_alias_set (target, alias_set);
4783	}
4784
4785      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4786    }
4787  else
4788    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4789		 alias_set);
4790}
4791
4792/* Store the value of constructor EXP into the rtx TARGET.
4793   TARGET is either a REG or a MEM; we know it cannot conflict, since
4794   safe_from_p has been called.
4795   CLEARED is true if TARGET is known to have been zero'd.
4796   CLEARED is true if TARGET is known to have been zeroed.
4797   may not be the same as the size of EXP if we are assigning to a field
4798   which has been packed to exclude padding bits.  */
4799
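
/* An informal example of the clearing strategy below: for an initializer
   that mentions only some of the fields, such as
   struct { int a, b, c; } s = { 1 };, the constructor has fewer elements
   than the type has fields, so the whole object is cleared first and only
   the explicit initializer for A is stored afterwards.  */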
4800static void
4801store_constructor (exp, target, cleared, size)
4802     tree exp;
4803     rtx target;
4804     int cleared;
4805     HOST_WIDE_INT size;
4806{
4807  tree type = TREE_TYPE (exp);
4808#ifdef WORD_REGISTER_OPERATIONS
4809  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4810#endif
4811
4812  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4813      || TREE_CODE (type) == QUAL_UNION_TYPE)
4814    {
4815      tree elt;
4816
4817      /* If size is zero or the target is already cleared, do nothing.  */
4818      if (size == 0 || cleared)
4819	cleared = 1;
4820      /* We either clear the aggregate or indicate the value is dead.  */
4821      else if ((TREE_CODE (type) == UNION_TYPE
4822		|| TREE_CODE (type) == QUAL_UNION_TYPE)
4823	       && ! CONSTRUCTOR_ELTS (exp))
4824	/* If the constructor is empty, clear the union.  */
4825	{
4826	  clear_storage (target, expr_size (exp));
4827	  cleared = 1;
4828	}
4829
4830      /* If we are building a static constructor into a register,
4831	 set the initial value as zero so we can fold the value into
4832	 a constant.  But if more than one register is involved,
4833	 this probably loses.  */
4834      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4835	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4836	{
4837	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4838	  cleared = 1;
4839	}
4840
4841      /* If the constructor has fewer fields than the structure
4842	 or if we are initializing the structure to mostly zeros,
4843	 clear the whole structure first.  Don't do this if TARGET is a
4844	 register whose mode size isn't equal to SIZE since clear_storage
4845	 can't handle this case.  */
4846      else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4847		|| mostly_zeros_p (exp))
4848	       && (GET_CODE (target) != REG
4849		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4850		       == size)))
4851	{
4852	  clear_storage (target, GEN_INT (size));
4853	  cleared = 1;
4854	}
4855
4856      if (! cleared)
4857	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4858
4859      /* Store each element of the constructor into
4860	 the corresponding field of TARGET.  */
4861
4862      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4863	{
4864	  tree field = TREE_PURPOSE (elt);
4865	  tree value = TREE_VALUE (elt);
4866	  enum machine_mode mode;
4867	  HOST_WIDE_INT bitsize;
4868	  HOST_WIDE_INT bitpos = 0;
4869	  int unsignedp;
4870	  tree offset;
4871	  rtx to_rtx = target;
4872
4873	  /* Just ignore missing fields.
4874	     We cleared the whole structure, above,
4875	     if any fields are missing.  */
4876	  if (field == 0)
4877	    continue;
4878
4879	  if (cleared && is_zeros_p (value))
4880	    continue;
4881
4882	  if (host_integerp (DECL_SIZE (field), 1))
4883	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4884	  else
4885	    bitsize = -1;
4886
4887	  unsignedp = TREE_UNSIGNED (field);
4888	  mode = DECL_MODE (field);
4889	  if (DECL_BIT_FIELD (field))
4890	    mode = VOIDmode;
4891
4892	  offset = DECL_FIELD_OFFSET (field);
4893	  if (host_integerp (offset, 0)
4894	      && host_integerp (bit_position (field), 0))
4895	    {
4896	      bitpos = int_bit_position (field);
4897	      offset = 0;
4898	    }
4899	  else
4900	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4901
4902	  if (offset)
4903	    {
4904	      rtx offset_rtx;
4905
4906	      if (contains_placeholder_p (offset))
4907		offset = build (WITH_RECORD_EXPR, sizetype,
4908				offset, make_tree (TREE_TYPE (exp), target));
4909
4910	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4911	      if (GET_CODE (to_rtx) != MEM)
4912		abort ();
4913
4914#ifdef POINTERS_EXTEND_UNSIGNED
4915	      if (GET_MODE (offset_rtx) != Pmode)
4916		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4917#else
4918	      if (GET_MODE (offset_rtx) != ptr_mode)
4919		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4920#endif
4921
4922	      to_rtx = offset_address (to_rtx, offset_rtx,
4923				       highest_pow2_factor (offset));
4924	    }
4925
4926	  if (TREE_READONLY (field))
4927	    {
4928	      if (GET_CODE (to_rtx) == MEM)
4929		to_rtx = copy_rtx (to_rtx);
4930
4931	      RTX_UNCHANGING_P (to_rtx) = 1;
4932	    }
4933
4934#ifdef WORD_REGISTER_OPERATIONS
4935	  /* If this initializes a field that is smaller than a word, at the
4936	     start of a word, try to widen it to a full word.
4937	     This special case allows us to output C++ member function
4938	     initializations in a form that the optimizers can understand.  */
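	  /* Illustration (assuming 32-bit words): a one-byte field
	     initializer of 0x12 placed at the start of a word is widened
	     below into a full word_mode store, shifted to the high-order
	     end (0x12000000) when BYTES_BIG_ENDIAN.  */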
4939	  if (GET_CODE (target) == REG
4940	      && bitsize < BITS_PER_WORD
4941	      && bitpos % BITS_PER_WORD == 0
4942	      && GET_MODE_CLASS (mode) == MODE_INT
4943	      && TREE_CODE (value) == INTEGER_CST
4944	      && exp_size >= 0
4945	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4946	    {
4947	      tree type = TREE_TYPE (value);
4948
4949	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4950		{
4951		  type = (*lang_hooks.types.type_for_size)
4952		    (BITS_PER_WORD, TREE_UNSIGNED (type));
4953		  value = convert (type, value);
4954		}
4955
4956	      if (BYTES_BIG_ENDIAN)
4957		value
4958		  = fold (build (LSHIFT_EXPR, type, value,
4959				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4960	      bitsize = BITS_PER_WORD;
4961	      mode = word_mode;
4962	    }
4963#endif
4964
4965	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4966	      && DECL_NONADDRESSABLE_P (field))
4967	    {
4968	      to_rtx = copy_rtx (to_rtx);
4969	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4970	    }
4971
4972	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4973				   value, type, cleared,
4974				   get_alias_set (TREE_TYPE (field)));
4975	}
4976    }
4977  else if (TREE_CODE (type) == ARRAY_TYPE
4978	   || TREE_CODE (type) == VECTOR_TYPE)
4979    {
4980      tree elt;
4981      int i;
4982      int need_to_clear;
4983      tree domain = TYPE_DOMAIN (type);
4984      tree elttype = TREE_TYPE (type);
4985      int const_bounds_p;
4986      HOST_WIDE_INT minelt = 0;
4987      HOST_WIDE_INT maxelt = 0;
4988
4989      /* Vectors are like arrays, but the domain is stored via an array
4990	 type indirectly.  */
4991      if (TREE_CODE (type) == VECTOR_TYPE)
4992	{
4993	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4994	     the same field as TYPE_DOMAIN, we are not guaranteed that
4995	     it always will.  */
4996	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4997	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4998	}
4999
5000      const_bounds_p = (TYPE_MIN_VALUE (domain)
5001			&& TYPE_MAX_VALUE (domain)
5002			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
5003			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
5004
5005      /* If we have constant bounds for the range of the type, get them.  */
5006      if (const_bounds_p)
5007	{
5008	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5009	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5010	}
5011
5012      /* If the constructor has fewer elements than the array,
5013         clear the whole array first.  Similarly if this is a
5014         static constructor of a non-BLKmode object.  */
5015      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5016	need_to_clear = 1;
5017      else
5018	{
5019	  HOST_WIDE_INT count = 0, zero_count = 0;
5020	  need_to_clear = ! const_bounds_p;
5021
5022	  /* This loop is a more accurate version of the loop in
5023	     mostly_zeros_p (it handles RANGE_EXPR in an index).
5024	     It is also needed to check for missing elements.  */
5025	  for (elt = CONSTRUCTOR_ELTS (exp);
5026	       elt != NULL_TREE && ! need_to_clear;
5027	       elt = TREE_CHAIN (elt))
5028	    {
5029	      tree index = TREE_PURPOSE (elt);
5030	      HOST_WIDE_INT this_node_count;
5031
5032	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5033		{
5034		  tree lo_index = TREE_OPERAND (index, 0);
5035		  tree hi_index = TREE_OPERAND (index, 1);
5036
5037		  if (! host_integerp (lo_index, 1)
5038		      || ! host_integerp (hi_index, 1))
5039		    {
5040		      need_to_clear = 1;
5041		      break;
5042		    }
5043
5044		  this_node_count = (tree_low_cst (hi_index, 1)
5045				     - tree_low_cst (lo_index, 1) + 1);
5046		}
5047	      else
5048		this_node_count = 1;
5049
5050	      count += this_node_count;
5051	      if (mostly_zeros_p (TREE_VALUE (elt)))
5052		zero_count += this_node_count;
5053	    }
5054
5055	  /* Clear the entire array first if there are any missing elements,
5056	     or if the incidence of zero elements is >= 75%.  */
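	  /* For example, int a[8] = { 1 }; supplies only one of the eight
	     elements, so the test below forces a full clear and only the
	     explicit element is stored afterwards.  */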
5057	  if (! need_to_clear
5058	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5059	    need_to_clear = 1;
5060	}
5061
5062      if (need_to_clear && size > 0)
5063	{
5064	  if (! cleared)
5065	    {
5066	      if (REG_P (target))
5067		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5068	      else
5069		clear_storage (target, GEN_INT (size));
5070	    }
5071	  cleared = 1;
5072	}
5073      else if (REG_P (target))
5074	/* Inform later passes that the old value is dead.  */
5075	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5076
5077      /* Store each element of the constructor into
5078	 the corresponding element of TARGET, determined
5079	 by counting the elements.  */
5080      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5081	   elt;
5082	   elt = TREE_CHAIN (elt), i++)
5083	{
5084	  enum machine_mode mode;
5085	  HOST_WIDE_INT bitsize;
5086	  HOST_WIDE_INT bitpos;
5087	  int unsignedp;
5088	  tree value = TREE_VALUE (elt);
5089	  tree index = TREE_PURPOSE (elt);
5090	  rtx xtarget = target;
5091
5092	  if (cleared && is_zeros_p (value))
5093	    continue;
5094
5095	  unsignedp = TREE_UNSIGNED (elttype);
5096	  mode = TYPE_MODE (elttype);
5097	  if (mode == BLKmode)
5098	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5099		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
5100		       : -1);
5101	  else
5102	    bitsize = GET_MODE_BITSIZE (mode);
5103
5104	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5105	    {
5106	      tree lo_index = TREE_OPERAND (index, 0);
5107	      tree hi_index = TREE_OPERAND (index, 1);
5108	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5109	      struct nesting *loop;
5110	      HOST_WIDE_INT lo, hi, count;
5111	      tree position;
5112
5113	      /* If the range is constant and "small", unroll the loop.  */
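	      /* Informal illustration (not from the original sources):
		 a RANGE_EXPR index comes from a designated range
		 initializer in GNU C, e.g.

		     int a[100] = { [10 ... 19] = 1 };

		 When the bounds are constant and either the target is not
		 a MEM or the initialized data is small (at most two
		 elements or at most 40 bytes here), each element is
		 stored individually; otherwise a runtime loop is
		 generated below.  */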
5114	      if (const_bounds_p
5115		  && host_integerp (lo_index, 0)
5116		  && host_integerp (hi_index, 0)
5117		  && (lo = tree_low_cst (lo_index, 0),
5118		      hi = tree_low_cst (hi_index, 0),
5119		      count = hi - lo + 1,
5120		      (GET_CODE (target) != MEM
5121		       || count <= 2
5122		       || (host_integerp (TYPE_SIZE (elttype), 1)
5123			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5124			       <= 40 * 8)))))
5125		{
5126		  lo -= minelt;  hi -= minelt;
5127		  for (; lo <= hi; lo++)
5128		    {
5129		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5130
5131		      if (GET_CODE (target) == MEM
5132			  && !MEM_KEEP_ALIAS_SET_P (target)
5133			  && TREE_CODE (type) == ARRAY_TYPE
5134			  && TYPE_NONALIASED_COMPONENT (type))
5135			{
5136			  target = copy_rtx (target);
5137			  MEM_KEEP_ALIAS_SET_P (target) = 1;
5138			}
5139
5140		      store_constructor_field
5141			(target, bitsize, bitpos, mode, value, type, cleared,
5142			 get_alias_set (elttype));
5143		    }
5144		}
5145	      else
5146		{
5147		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5148		  loop_top = gen_label_rtx ();
5149		  loop_end = gen_label_rtx ();
5150
5151		  unsignedp = TREE_UNSIGNED (domain);
5152
5153		  index = build_decl (VAR_DECL, NULL_TREE, domain);
5154
5155		  index_r
5156		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5157						 &unsignedp, 0));
5158		  SET_DECL_RTL (index, index_r);
5159		  if (TREE_CODE (value) == SAVE_EXPR
5160		      && SAVE_EXPR_RTL (value) == 0)
5161		    {
5162		      /* Make sure value gets expanded once before the
5163                         loop.  */
5164		      expand_expr (value, const0_rtx, VOIDmode, 0);
5165		      emit_queue ();
5166		    }
5167		  store_expr (lo_index, index_r, 0);
5168		  loop = expand_start_loop (0);
5169
5170		  /* Assign value to element index.  */
5171		  position
5172		    = convert (ssizetype,
5173			       fold (build (MINUS_EXPR, TREE_TYPE (index),
5174					    index, TYPE_MIN_VALUE (domain))));
5175		  position = size_binop (MULT_EXPR, position,
5176					 convert (ssizetype,
5177						  TYPE_SIZE_UNIT (elttype)));
5178
5179		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5180		  xtarget = offset_address (target, pos_rtx,
5181					    highest_pow2_factor (position));
5182		  xtarget = adjust_address (xtarget, mode, 0);
5183		  if (TREE_CODE (value) == CONSTRUCTOR)
5184		    store_constructor (value, xtarget, cleared,
5185				       bitsize / BITS_PER_UNIT);
5186		  else
5187		    store_expr (value, xtarget, 0);
5188
5189		  expand_exit_loop_if_false (loop,
5190					     build (LT_EXPR, integer_type_node,
5191						    index, hi_index));
5192
5193		  expand_increment (build (PREINCREMENT_EXPR,
5194					   TREE_TYPE (index),
5195					   index, integer_one_node), 0, 0);
5196		  expand_end_loop ();
5197		  emit_label (loop_end);
5198		}
5199	    }
5200	  else if ((index != 0 && ! host_integerp (index, 0))
5201		   || ! host_integerp (TYPE_SIZE (elttype), 1))
5202	    {
5203	      tree position;
5204
5205	      if (index == 0)
5206		index = ssize_int (1);
5207
5208	      if (minelt)
5209		index = convert (ssizetype,
5210				 fold (build (MINUS_EXPR, index,
5211					      TYPE_MIN_VALUE (domain))));
5212
5213	      position = size_binop (MULT_EXPR, index,
5214				     convert (ssizetype,
5215					      TYPE_SIZE_UNIT (elttype)));
5216	      xtarget = offset_address (target,
5217					expand_expr (position, 0, VOIDmode, 0),
5218					highest_pow2_factor (position));
5219	      xtarget = adjust_address (xtarget, mode, 0);
5220	      store_expr (value, xtarget, 0);
5221	    }
5222	  else
5223	    {
5224	      if (index != 0)
5225		bitpos = ((tree_low_cst (index, 0) - minelt)
5226			  * tree_low_cst (TYPE_SIZE (elttype), 1));
5227	      else
5228		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5229
5230	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5231		  && TREE_CODE (type) == ARRAY_TYPE
5232		  && TYPE_NONALIASED_COMPONENT (type))
5233		{
5234		  target = copy_rtx (target);
5235		  MEM_KEEP_ALIAS_SET_P (target) = 1;
5236		}
5237
5238	      store_constructor_field (target, bitsize, bitpos, mode, value,
5239				       type, cleared, get_alias_set (elttype));
5240
5241	    }
5242	}
5243    }
5244
5245  /* Set constructor assignments.  */
5246  else if (TREE_CODE (type) == SET_TYPE)
5247    {
5248      tree elt = CONSTRUCTOR_ELTS (exp);
5249      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5250      tree domain = TYPE_DOMAIN (type);
5251      tree domain_min, domain_max, bitlength;
5252
5253      /* The default implementation strategy is to extract the constant
5254	 parts of the constructor, use that to initialize the target,
5255	 and then "or" in whatever non-constant ranges we need in addition.
5256
5257	 If a large set is all zero or all ones, it is
5258	 probably better to set it using memset (if available) or bzero.
5259	 Also, if a large set has just a single range, it may be
5260	 better to first clear the whole set (using bzero/memset)
5261	 and then set the bits we want.  */
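      /* Informal illustration (not from the original sources): for a set
	 over the domain 0..31 whose constructor names the element 3 and
	 the range 8..15, NBITS is 32 and the constant part can be emitted
	 as a single word with those bits set; any non-constant ranges
	 would then be or'd in afterwards via __setbits.  */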
5262
5263      /* Check for all zeros.  */
5264      if (elt == NULL_TREE && size > 0)
5265	{
5266	  if (!cleared)
5267	    clear_storage (target, GEN_INT (size));
5268	  return;
5269	}
5270
5271      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5272      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5273      bitlength = size_binop (PLUS_EXPR,
5274			      size_diffop (domain_max, domain_min),
5275			      ssize_int (1));
5276
5277      nbits = tree_low_cst (bitlength, 1);
5278
5279      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5280	 are "complicated" (more than one range), initialize (the
5281	 constant parts) by copying from a constant.  */
5282      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5283	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5284	{
5285	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5286	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5287	  char *bit_buffer = (char *) alloca (nbits);
5288	  HOST_WIDE_INT word = 0;
5289	  unsigned int bit_pos = 0;
5290	  unsigned int ibit = 0;
5291	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
5292
5293	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5294	  for (;;)
5295	    {
5296	      if (bit_buffer[ibit])
5297		{
5298		  if (BYTES_BIG_ENDIAN)
5299		    word |= (1 << (set_word_size - 1 - bit_pos));
5300		  else
5301		    word |= 1 << bit_pos;
5302		}
5303
5304	      bit_pos++;  ibit++;
5305	      if (bit_pos >= set_word_size || ibit == nbits)
5306		{
5307		  if (word != 0 || ! cleared)
5308		    {
5309		      rtx datum = GEN_INT (word);
5310		      rtx to_rtx;
5311
5312		      /* The assumption here is that it is safe to use
5313			 XEXP if the set is multi-word, but not if
5314			 it's single-word.  */
5315		      if (GET_CODE (target) == MEM)
5316			to_rtx = adjust_address (target, mode, offset);
5317		      else if (offset == 0)
5318			to_rtx = target;
5319		      else
5320			abort ();
5321		      emit_move_insn (to_rtx, datum);
5322		    }
5323
5324		  if (ibit == nbits)
5325		    break;
5326		  word = 0;
5327		  bit_pos = 0;
5328		  offset += set_word_size / BITS_PER_UNIT;
5329		}
5330	    }
5331	}
5332      else if (!cleared)
5333	/* Don't bother clearing storage if the set is all ones.  */
5334	if (TREE_CHAIN (elt) != NULL_TREE
5335	    || (TREE_PURPOSE (elt) == NULL_TREE
5336		? nbits != 1
5337		: ( ! host_integerp (TREE_VALUE (elt), 0)
5338		   || ! host_integerp (TREE_PURPOSE (elt), 0)
5339		   || (tree_low_cst (TREE_VALUE (elt), 0)
5340		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5341		       != (HOST_WIDE_INT) nbits))))
5342	  clear_storage (target, expr_size (exp));
5343
5344      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5345	{
5346	  /* Start of range of element or NULL.  */
5347	  tree startbit = TREE_PURPOSE (elt);
5348	  /* End of range of element, or element value.  */
5349	  tree endbit   = TREE_VALUE (elt);
5350	  HOST_WIDE_INT startb, endb;
5351	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5352
5353	  bitlength_rtx = expand_expr (bitlength,
5354				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5355
5356	  /* Handle a non-range tuple element like [ expr ].  */
5357	  if (startbit == NULL_TREE)
5358	    {
5359	      startbit = save_expr (endbit);
5360	      endbit = startbit;
5361	    }
5362
5363	  startbit = convert (sizetype, startbit);
5364	  endbit = convert (sizetype, endbit);
5365	  if (! integer_zerop (domain_min))
5366	    {
5367	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5368	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5369	    }
5370	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5371				      EXPAND_CONST_ADDRESS);
5372	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5373				    EXPAND_CONST_ADDRESS);
5374
5375	  if (REG_P (target))
5376	    {
5377	      targetx
5378		= assign_temp
5379		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5380					  (GET_MODE (target), 0),
5381					  TYPE_QUAL_CONST)),
5382		   0, 1, 1);
5383	      emit_move_insn (targetx, target);
5384	    }
5385
5386	  else if (GET_CODE (target) == MEM)
5387	    targetx = target;
5388	  else
5389	    abort ();
5390
5391	  /* Optimization:  If startbit and endbit are constants divisible
5392	     by BITS_PER_UNIT, call memset instead.  */
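	  /* Informal illustration (not from the original sources): with
	     8-bit units, setting the range 8..23 becomes
	     memset (target + 1, -1, 2) instead of a __setbits call.  */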
5393	  if (TARGET_MEM_FUNCTIONS
5394	      && TREE_CODE (startbit) == INTEGER_CST
5395	      && TREE_CODE (endbit) == INTEGER_CST
5396	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5397	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5398	    {
5399	      emit_library_call (memset_libfunc, LCT_NORMAL,
5400				 VOIDmode, 3,
5401				 plus_constant (XEXP (targetx, 0),
5402						startb / BITS_PER_UNIT),
5403				 Pmode,
5404				 constm1_rtx, TYPE_MODE (integer_type_node),
5405				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5406				 TYPE_MODE (sizetype));
5407	    }
5408	  else
5409	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5410			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5411			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5412			       startbit_rtx, TYPE_MODE (sizetype),
5413			       endbit_rtx, TYPE_MODE (sizetype));
5414
5415	  if (REG_P (target))
5416	    emit_move_insn (target, targetx);
5417	}
5418    }
5419
5420  else
5421    abort ();
5422}
5423
5424/* Store the value of EXP (an expression tree)
5425   into a subfield of TARGET which has mode MODE and occupies
5426   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5427   If MODE is VOIDmode, it means that we are storing into a bit-field.
5428
5429   If VALUE_MODE is VOIDmode, return nothing in particular.
5430   UNSIGNEDP is not used in this case.
5431
5432   Otherwise, return an rtx for the value stored.  This rtx
5433   has mode VALUE_MODE if that is convenient to do.
5434   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5435
5436   TYPE is the type of the underlying object.
5437
5438   ALIAS_SET is the alias set for the destination.  This value will
5439   (in general) be different from that for TARGET, since TARGET is a
5440   reference to the containing structure.  */
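/* Informal example (not from the original sources): for an assignment to
   a bit-field member, say s.x = v where x is declared `unsigned x : 5;',
   this is reached with BITSIZE 5, BITPOS the position of x within s, and
   MODE VOIDmode, and the value ends up being stored with store_bit_field.  */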
5441
5442static rtx
5443store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5444	     alias_set)
5445     rtx target;
5446     HOST_WIDE_INT bitsize;
5447     HOST_WIDE_INT bitpos;
5448     enum machine_mode mode;
5449     tree exp;
5450     enum machine_mode value_mode;
5451     int unsignedp;
5452     tree type;
5453     int alias_set;
5454{
5455  HOST_WIDE_INT width_mask = 0;
5456
5457  if (TREE_CODE (exp) == ERROR_MARK)
5458    return const0_rtx;
5459
5460  /* If we have nothing to store, do nothing unless the expression has
5461     side-effects.  */
5462  if (bitsize == 0)
5463    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5464  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5465    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5466
5467  /* If we are storing into an unaligned field of an aligned union that is
5468     in a register, we may have the mode of TARGET being an integer mode but
5469     MODE == BLKmode.  In that case, get an aligned object whose size and
5470     alignment are the same as TARGET and store TARGET into it (we can avoid
5471     the store if the field being stored is the entire width of TARGET).  Then
5472     call ourselves recursively to store the field into a BLKmode version of
5473     that object.  Finally, load from the object into TARGET.  This is not
5474     very efficient in general, but should only be slightly more expensive
5475     than the otherwise-required unaligned accesses.  Perhaps this can be
5476     cleaned up later.  */
5477
5478  if (mode == BLKmode
5479      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5480    {
5481      rtx object
5482	= assign_temp
5483	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5484	   0, 1, 1);
5485      rtx blk_object = adjust_address (object, BLKmode, 0);
5486
5487      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5488	emit_move_insn (object, target);
5489
5490      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5491		   alias_set);
5492
5493      emit_move_insn (target, object);
5494
5495      /* We want to return the BLKmode version of the data.  */
5496      return blk_object;
5497    }
5498
5499  if (GET_CODE (target) == CONCAT)
5500    {
5501      /* We're storing into a struct containing a single __complex.  */
5502
5503      if (bitpos != 0)
5504	abort ();
5505      return store_expr (exp, target, 0);
5506    }
5507
5508  /* If the structure is in a register or if the component
5509     is a bit field, we cannot use addressing to access it.
5510     Use bit-field techniques or SUBREG to store in it.  */
5511
5512  if (mode == VOIDmode
5513      || (mode != BLKmode && ! direct_store[(int) mode]
5514	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5515	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5516      || GET_CODE (target) == REG
5517      || GET_CODE (target) == SUBREG
5518      /* If the field isn't aligned enough to store as an ordinary memref,
5519	 store it as a bit field.  */
5520      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5521	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5522	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5523      /* If the RHS and field are a constant size and the size of the
5524	 RHS isn't the same size as the bitfield, we must use bitfield
5525	 operations.  */
5526      || (bitsize >= 0
5527	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5528	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5529    {
5530      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5531
5532      /* If BITSIZE is narrower than the size of the type of EXP
5533	 we will be narrowing TEMP.  Normally, what's wanted are the
5534	 low-order bits.  However, if EXP's type is a record and this is
5535	 a big-endian machine, we want the upper BITSIZE bits.  */
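      /* Informal example (not from the original sources): when a 24-bit
	 RECORD_TYPE value held in a 32-bit register is stored into a
	 24-bit field on a big-endian target, its bytes occupy the
	 high-order bits of TEMP, so TEMP is shifted right by 8 before
	 the bit-field store below.  */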
5536      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5537	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5538	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5539	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5540			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5541				       - bitsize),
5542			     temp, 1);
5543
5544      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5545	 MODE.  */
5546      if (mode != VOIDmode && mode != BLKmode
5547	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5548	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5549
5550      /* If the modes of TARGET and TEMP are both BLKmode, both
5551	 must be in memory and BITPOS must be aligned on a byte
5552	 boundary.  If so, we simply do a block copy.  */
5553      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5554	{
5555	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5556	      || bitpos % BITS_PER_UNIT != 0)
5557	    abort ();
5558
5559	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5560	  emit_block_move (target, temp,
5561			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5562				    / BITS_PER_UNIT),
5563			   BLOCK_OP_NORMAL);
5564
5565	  return value_mode == VOIDmode ? const0_rtx : target;
5566	}
5567
5568      /* Store the value in the bitfield.  */
5569      store_bit_field (target, bitsize, bitpos, mode, temp,
5570		       int_size_in_bytes (type));
5571
5572      if (value_mode != VOIDmode)
5573	{
5574	  /* The caller wants an rtx for the value.
5575	     If possible, avoid refetching from the bitfield itself.  */
5576	  if (width_mask != 0
5577	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5578	    {
5579	      tree count;
5580	      enum machine_mode tmode;
5581
5582	      tmode = GET_MODE (temp);
5583	      if (tmode == VOIDmode)
5584		tmode = value_mode;
5585
5586	      if (unsignedp)
5587		return expand_and (tmode, temp,
5588				   gen_int_mode (width_mask, tmode),
5589				   NULL_RTX);
5590
5591	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5592	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5593	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5594	    }
5595
5596	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5597				    NULL_RTX, value_mode, VOIDmode,
5598				    int_size_in_bytes (type));
5599	}
5600      return const0_rtx;
5601    }
5602  else
5603    {
5604      rtx addr = XEXP (target, 0);
5605      rtx to_rtx = target;
5606
5607      /* If a value is wanted, it must be the lhs;
5608	 so make the address stable for multiple uses.  */
5609
5610      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5611	  && ! CONSTANT_ADDRESS_P (addr)
5612	  /* A frame-pointer reference is already stable.  */
5613	  && ! (GET_CODE (addr) == PLUS
5614		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5615		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5616		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5617	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5618
5619      /* Now build a reference to just the desired component.  */
5620
5621      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5622
5623      if (to_rtx == target)
5624	to_rtx = copy_rtx (to_rtx);
5625
5626      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5627      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5628	set_mem_alias_set (to_rtx, alias_set);
5629
5630      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5631    }
5632}
5633
5634/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5635   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5636   codes and find the ultimate containing object, which we return.
5637
5638   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5639   bit position, and *PUNSIGNEDP to the signedness of the field.
5640   If the position of the field is variable, we store a tree
5641   giving the variable offset (in units) in *POFFSET.
5642   This offset is in addition to the bit position.
5643   If the position is not variable, we store 0 in *POFFSET.
5644
5645   If any of the extraction expressions is volatile,
5646   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5647
5648   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5649   is a mode that can be used to access the field.  In that case, *PBITSIZE
5650   is redundant.
5651
5652   If the field describes a variable-sized object, *PMODE is set to
5653   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5654   this case, but the address of the object can be found.  */
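/* Informal example (not from the original sources): for a C reference
   such as s.f.a[i], this function walks through the COMPONENT_REFs and
   the ARRAY_REF and returns the declaration of `s'.  The position of the
   accessed element is described by *PBITPOS together with, when the index
   `i' is not constant, a byte-offset tree stored in *POFFSET; *PBITSIZE
   and *PMODE describe the element being accessed.  */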
5655
5656tree
5657get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5658		     punsignedp, pvolatilep)
5659     tree exp;
5660     HOST_WIDE_INT *pbitsize;
5661     HOST_WIDE_INT *pbitpos;
5662     tree *poffset;
5663     enum machine_mode *pmode;
5664     int *punsignedp;
5665     int *pvolatilep;
5666{
5667  tree size_tree = 0;
5668  enum machine_mode mode = VOIDmode;
5669  tree offset = size_zero_node;
5670  tree bit_offset = bitsize_zero_node;
5671  tree placeholder_ptr = 0;
5672  tree tem;
5673
5674  /* First get the mode, signedness, and size.  We do this from just the
5675     outermost expression.  */
5676  if (TREE_CODE (exp) == COMPONENT_REF)
5677    {
5678      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5679      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5680	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5681
5682      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5683    }
5684  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5685    {
5686      size_tree = TREE_OPERAND (exp, 1);
5687      *punsignedp = TREE_UNSIGNED (exp);
5688    }
5689  else
5690    {
5691      mode = TYPE_MODE (TREE_TYPE (exp));
5692      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5693
5694      if (mode == BLKmode)
5695	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5696      else
5697	*pbitsize = GET_MODE_BITSIZE (mode);
5698    }
5699
5700  if (size_tree != 0)
5701    {
5702      if (! host_integerp (size_tree, 1))
5703	mode = BLKmode, *pbitsize = -1;
5704      else
5705	*pbitsize = tree_low_cst (size_tree, 1);
5706    }
5707
5708  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5709     and find the ultimate containing object.  */
5710  while (1)
5711    {
5712      if (TREE_CODE (exp) == BIT_FIELD_REF)
5713	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5714      else if (TREE_CODE (exp) == COMPONENT_REF)
5715	{
5716	  tree field = TREE_OPERAND (exp, 1);
5717	  tree this_offset = DECL_FIELD_OFFSET (field);
5718
5719	  /* If this field hasn't been filled in yet, don't go
5720	     past it.  This should only happen when folding expressions
5721	     made during type construction.  */
5722	  if (this_offset == 0)
5723	    break;
5724	  else if (! TREE_CONSTANT (this_offset)
5725		   && contains_placeholder_p (this_offset))
5726	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5727
5728	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5729	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5730				   DECL_FIELD_BIT_OFFSET (field));
5731
5732	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5733	}
5734
5735      else if (TREE_CODE (exp) == ARRAY_REF
5736	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5737	{
5738	  tree index = TREE_OPERAND (exp, 1);
5739	  tree array = TREE_OPERAND (exp, 0);
5740	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5741	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5742	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5743
5744	  /* We assume all arrays have sizes that are a multiple of a byte.
5745	     First subtract the lower bound, if any, in the type of the
5746	     index, then convert to sizetype and multiply by the size of the
5747	     array element.  */
5748	  if (low_bound != 0 && ! integer_zerop (low_bound))
5749	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5750				 index, low_bound));
5751
5752	  /* If the index has a self-referential type, pass it to a
5753	     WITH_RECORD_EXPR; if the component size is self-referential,
5754	     pass our component to one.  */
5755	  if (! TREE_CONSTANT (index)
5756	      && contains_placeholder_p (index))
5757	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5758	  if (! TREE_CONSTANT (unit_size)
5759	      && contains_placeholder_p (unit_size))
5760	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5761
5762	  offset = size_binop (PLUS_EXPR, offset,
5763			       size_binop (MULT_EXPR,
5764					   convert (sizetype, index),
5765					   unit_size));
5766	}
5767
5768      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5769	{
5770	  tree new = find_placeholder (exp, &placeholder_ptr);
5771
5772	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5773	     We might have been called from tree optimization where we
5774	     haven't set up an object yet.  */
5775	  if (new == 0)
5776	    break;
5777	  else
5778	    exp = new;
5779
5780	  continue;
5781	}
5782      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5783	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5784	       && ! ((TREE_CODE (exp) == NOP_EXPR
5785		      || TREE_CODE (exp) == CONVERT_EXPR)
5786		     && (TYPE_MODE (TREE_TYPE (exp))
5787			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5788	break;
5789
5790      /* If any reference in the chain is volatile, the effect is volatile.  */
5791      if (TREE_THIS_VOLATILE (exp))
5792	*pvolatilep = 1;
5793
5794      exp = TREE_OPERAND (exp, 0);
5795    }
5796
5797  /* If OFFSET is constant, see if we can return the whole thing as a
5798     constant bit position.  Otherwise, split it up.  */
5799  if (host_integerp (offset, 0)
5800      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5801				 bitsize_unit_node))
5802      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5803      && host_integerp (tem, 0))
5804    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5805  else
5806    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5807
5808  *pmode = mode;
5809  return exp;
5810}
5811
5812/* Return 1 if T is an expression that get_inner_reference handles.  */
5813
5814int
5815handled_component_p (t)
5816     tree t;
5817{
5818  switch (TREE_CODE (t))
5819    {
5820    case BIT_FIELD_REF:
5821    case COMPONENT_REF:
5822    case ARRAY_REF:
5823    case ARRAY_RANGE_REF:
5824    case NON_LVALUE_EXPR:
5825    case VIEW_CONVERT_EXPR:
5826      return 1;
5827
5828    case NOP_EXPR:
5829    case CONVERT_EXPR:
5830      return (TYPE_MODE (TREE_TYPE (t))
5831	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5832
5833    default:
5834      return 0;
5835    }
5836}
5837
5838/* Given an rtx VALUE that may contain additions and multiplications, return
5839   an equivalent value that just refers to a register, memory, or constant.
5840   This is done by generating instructions to perform the arithmetic and
5841   returning a pseudo-register containing the value.
5842
5843   The returned value may be a REG, SUBREG, MEM or constant.  */
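/* Informal example (not from the original sources): given the rtx
   (plus (mult (reg A) (const_int 4)) (reg B)), force_operand emits the
   multiply and the add and returns a pseudo register holding the result,
   leaving the caller with a simple operand.  */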
5844
5845rtx
5846force_operand (value, target)
5847     rtx value, target;
5848{
5849  rtx op1, op2;
5850  /* Use subtarget as the target for operand 0 of a binary operation.  */
5851  rtx subtarget = get_subtarget (target);
5852  enum rtx_code code = GET_CODE (value);
5853
5854  /* Check for a PIC address load.  */
5855  if ((code == PLUS || code == MINUS)
5856      && XEXP (value, 0) == pic_offset_table_rtx
5857      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5858	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5859	  || GET_CODE (XEXP (value, 1)) == CONST))
5860    {
5861      if (!subtarget)
5862	subtarget = gen_reg_rtx (GET_MODE (value));
5863      emit_move_insn (subtarget, value);
5864      return subtarget;
5865    }
5866
5867  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5868    {
5869      if (!target)
5870	target = gen_reg_rtx (GET_MODE (value));
5871      convert_move (target, force_operand (XEXP (value, 0), NULL),
5872		    code == ZERO_EXTEND);
5873      return target;
5874    }
5875
5876  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5877    {
5878      op2 = XEXP (value, 1);
5879      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5880	subtarget = 0;
5881      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5882	{
5883	  code = PLUS;
5884	  op2 = negate_rtx (GET_MODE (value), op2);
5885	}
5886
5887      /* Check for an addition with OP2 a constant integer and our first
5888         operand a PLUS of a virtual register and something else.  In that
5889         case, we want to emit the sum of the virtual register and the
5890         constant first and then add the other value.  This allows virtual
5891         register instantiation to simply modify the constant rather than
5892         creating another one around this addition.  */
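      /* Informal illustration (not from the original sources): for
	 (plus (plus (reg virtual-stack-vars) (reg X)) (const_int 4)),
	 the sum virtual-stack-vars + 4 is emitted first and X is added
	 afterwards, so that instantiating the virtual register can fold
	 the 4 into the final frame offset.  */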
5893      if (code == PLUS && GET_CODE (op2) == CONST_INT
5894	  && GET_CODE (XEXP (value, 0)) == PLUS
5895	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5896	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5897	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5898	{
5899	  rtx temp = expand_simple_binop (GET_MODE (value), code,
5900					  XEXP (XEXP (value, 0), 0), op2,
5901					  subtarget, 0, OPTAB_LIB_WIDEN);
5902	  return expand_simple_binop (GET_MODE (value), code, temp,
5903				      force_operand (XEXP (XEXP (value,
5904								 0), 1), 0),
5905				      target, 0, OPTAB_LIB_WIDEN);
5906	}
5907
5908      op1 = force_operand (XEXP (value, 0), subtarget);
5909      op2 = force_operand (op2, NULL_RTX);
5910      switch (code)
5911	{
5912	case MULT:
5913	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
5914	case DIV:
5915	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
5916	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
5917					target, 1, OPTAB_LIB_WIDEN);
5918	  else
5919	    return expand_divmod (0,
5920				  FLOAT_MODE_P (GET_MODE (value))
5921				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
5922				  GET_MODE (value), op1, op2, target, 0);
5923	  break;
5924	case MOD:
5925	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5926				target, 0);
5927	  break;
5928	case UDIV:
5929	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5930				target, 1);
5931	  break;
5932	case UMOD:
5933	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5934				target, 1);
5935	  break;
5936	case ASHIFTRT:
5937	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5938				      target, 0, OPTAB_LIB_WIDEN);
5939	  break;
5940	default:
5941	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5942				      target, 1, OPTAB_LIB_WIDEN);
5943	}
5944    }
5945  if (GET_RTX_CLASS (code) == '1')
5946    {
5947      op1 = force_operand (XEXP (value, 0), NULL_RTX);
5948      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5949    }
5950
5951#ifdef INSN_SCHEDULING
5952  /* On machines that have insn scheduling, we want all memory references to be
5953     explicit, so we need to deal with such paradoxical SUBREGs.  */
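  /* E.g. (illustration only, not from the original sources): for a
     paradoxical (subreg:SI (mem:HI ...) 0), the narrow memory operand is
     forced into a register below so that the memory reference becomes an
     explicit load insn.  */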
5954  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5955      && (GET_MODE_SIZE (GET_MODE (value))
5956	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5957    value
5958      = simplify_gen_subreg (GET_MODE (value),
5959			     force_reg (GET_MODE (SUBREG_REG (value)),
5960					force_operand (SUBREG_REG (value),
5961						       NULL_RTX)),
5962			     GET_MODE (SUBREG_REG (value)),
5963			     SUBREG_BYTE (value));
5964#endif
5965
5966  return value;
5967}
5968
5969/* Subroutine of expand_expr: return nonzero iff there is no way that
5970   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5971   call is going to be used to determine whether we need a temporary
5972   for EXP, as opposed to a recursive call to this function.
5973
5974   It is always safe for this routine to return zero since it merely
5975   searches for optimization opportunities.  */
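/* Informal example (not from the original sources): when expanding an
   assignment such as a = b + f (), the expander may ask whether the rtx
   for `a' is safe from the right-hand side; because the CALL_EXPR can
   clobber memory, safe_from_p returns 0 when `a' lives in memory, and the
   sum is computed into a temporary instead of directly into `a'.  */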
5976
5977int
5978safe_from_p (x, exp, top_p)
5979     rtx x;
5980     tree exp;
5981     int top_p;
5982{
5983  rtx exp_rtl = 0;
5984  int i, nops;
5985  static tree save_expr_list;
5986
5987  if (x == 0
5988      /* If EXP has varying size, we MUST use a target since we currently
5989	 have no way of allocating temporaries of variable size
5990	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5991	 So we assume here that something at a higher level has prevented a
5992	 clash.  This is somewhat bogus, but the best we can do.  Only
5993	 do this when X is BLKmode and when we are at the top level.  */
5994      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5995	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5996	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5997	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5998	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5999	      != INTEGER_CST)
6000	  && GET_MODE (x) == BLKmode)
6001      /* If X is in the outgoing argument area, it is always safe.  */
6002      || (GET_CODE (x) == MEM
6003	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
6004	      || (GET_CODE (XEXP (x, 0)) == PLUS
6005		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6006    return 1;
6007
6008  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6009     find the underlying pseudo.  */
6010  if (GET_CODE (x) == SUBREG)
6011    {
6012      x = SUBREG_REG (x);
6013      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6014	return 0;
6015    }
6016
6017  /* A SAVE_EXPR might appear many times in the expression passed to the
6018     top-level safe_from_p call, and if it has a complex subexpression,
6019     examining it multiple times could result in a combinatorial explosion.
6020     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6021     with optimization took about 28 minutes to compile -- even though it was
6022     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
6023     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
6024     we have processed.  Note that the only test of top_p was above.  */
6025
6026  if (top_p)
6027    {
6028      int rtn;
6029      tree t;
6030
6031      save_expr_list = 0;
6032
6033      rtn = safe_from_p (x, exp, 0);
6034
6035      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6036	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6037
6038      return rtn;
6039    }
6040
6041  /* Now look at our tree code and possibly recurse.  */
6042  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6043    {
6044    case 'd':
6045      exp_rtl = DECL_RTL_IF_SET (exp);
6046      break;
6047
6048    case 'c':
6049      return 1;
6050
6051    case 'x':
6052      if (TREE_CODE (exp) == TREE_LIST)
6053	{
6054	  while (1)
6055	    {
6056	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6057		return 0;
6058	      exp = TREE_CHAIN (exp);
6059	      if (!exp)
6060		return 1;
6061	      if (TREE_CODE (exp) != TREE_LIST)
6062		return safe_from_p (x, exp, 0);
6063	    }
6064	}
6065      else if (TREE_CODE (exp) == ERROR_MARK)
6066	return 1;	/* An already-visited SAVE_EXPR? */
6067      else
6068	return 0;
6069
6070    case '2':
6071    case '<':
6072      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6073	return 0;
6074      /* FALLTHRU */
6075
6076    case '1':
6077      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6078
6079    case 'e':
6080    case 'r':
6081      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6082	 the expression.  If it is set, we conflict iff we are that rtx or
6083	 both are in memory.  Otherwise, we check all operands of the
6084	 expression recursively.  */
6085
6086      switch (TREE_CODE (exp))
6087	{
6088	case ADDR_EXPR:
6089	  /* If the operand is static or we are static, we can't conflict.
6090	     Likewise if we don't conflict with the operand at all.  */
6091	  if (staticp (TREE_OPERAND (exp, 0))
6092	      || TREE_STATIC (exp)
6093	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6094	    return 1;
6095
6096	  /* Otherwise, the only way this can conflict is if we are taking
6097	     the address of a DECL whose address is part of X, which is
6098	     very rare.  */
6099	  exp = TREE_OPERAND (exp, 0);
6100	  if (DECL_P (exp))
6101	    {
6102	      if (!DECL_RTL_SET_P (exp)
6103		  || GET_CODE (DECL_RTL (exp)) != MEM)
6104		return 0;
6105	      else
6106		exp_rtl = XEXP (DECL_RTL (exp), 0);
6107	    }
6108	  break;
6109
6110	case INDIRECT_REF:
6111	  if (GET_CODE (x) == MEM
6112	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6113					get_alias_set (exp)))
6114	    return 0;
6115	  break;
6116
6117	case CALL_EXPR:
6118	  /* Assume that the call will clobber all hard registers and
6119	     all of memory.  */
6120	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6121	      || GET_CODE (x) == MEM)
6122	    return 0;
6123	  break;
6124
6125	case RTL_EXPR:
6126	  /* If a sequence exists, we would have to scan every instruction
6127	     in the sequence to see if it was safe.  This is probably not
6128	     worthwhile.  */
6129	  if (RTL_EXPR_SEQUENCE (exp))
6130	    return 0;
6131
6132	  exp_rtl = RTL_EXPR_RTL (exp);
6133	  break;
6134
6135	case WITH_CLEANUP_EXPR:
6136	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6137	  break;
6138
6139	case CLEANUP_POINT_EXPR:
6140	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6141
6142	case SAVE_EXPR:
6143	  exp_rtl = SAVE_EXPR_RTL (exp);
6144	  if (exp_rtl)
6145	    break;
6146
6147	  /* If we've already scanned this, don't do it again.  Otherwise,
6148	     show we've scanned it and record for clearing the flag if we're
6149	     going on.  */
6150	  if (TREE_PRIVATE (exp))
6151	    return 1;
6152
6153	  TREE_PRIVATE (exp) = 1;
6154	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6155	    {
6156	      TREE_PRIVATE (exp) = 0;
6157	      return 0;
6158	    }
6159
6160	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6161	  return 1;
6162
6163	case BIND_EXPR:
6164	  /* The only operand we look at is operand 1.  The rest aren't
6165	     part of the expression.  */
6166	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6167
6168	case METHOD_CALL_EXPR:
6169	  /* This takes an rtx argument, but shouldn't appear here.  */
6170	  abort ();
6171
6172	default:
6173	  break;
6174	}
6175
6176      /* If we have an rtx, we do not need to scan our operands.  */
6177      if (exp_rtl)
6178	break;
6179
6180      nops = first_rtl_op (TREE_CODE (exp));
6181      for (i = 0; i < nops; i++)
6182	if (TREE_OPERAND (exp, i) != 0
6183	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6184	  return 0;
6185
6186      /* If this is a language-specific tree code, it may require
6187	 special handling.  */
6188      if ((unsigned int) TREE_CODE (exp)
6189	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6190	  && !(*lang_hooks.safe_from_p) (x, exp))
6191	return 0;
6192    }
6193
6194  /* If we have an rtl, find any enclosed object.  Then see if we conflict
6195     with it.  */
6196  if (exp_rtl)
6197    {
6198      if (GET_CODE (exp_rtl) == SUBREG)
6199	{
6200	  exp_rtl = SUBREG_REG (exp_rtl);
6201	  if (GET_CODE (exp_rtl) == REG
6202	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6203	    return 0;
6204	}
6205
6206      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6207	 are memory and they conflict.  */
6208      return ! (rtx_equal_p (x, exp_rtl)
6209		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6210		    && true_dependence (exp_rtl, VOIDmode, x,
6211					rtx_addr_varies_p)));
6212    }
6213
6214  /* If we reach here, it is safe.  */
6215  return 1;
6216}
6217
6218/* Subroutine of expand_expr: return rtx if EXP is a
6219   variable or parameter; else return 0.  */
6220
6221static rtx
6222var_rtx (exp)
6223     tree exp;
6224{
6225  STRIP_NOPS (exp);
6226  switch (TREE_CODE (exp))
6227    {
6228    case PARM_DECL:
6229    case VAR_DECL:
6230      return DECL_RTL (exp);
6231    default:
6232      return 0;
6233    }
6234}
6235
6236#ifdef MAX_INTEGER_COMPUTATION_MODE
6237
6238void
6239check_max_integer_computation_mode (exp)
6240     tree exp;
6241{
6242  enum tree_code code;
6243  enum machine_mode mode;
6244
6245  /* Strip any NOPs that don't change the mode.  */
6246  STRIP_NOPS (exp);
6247  code = TREE_CODE (exp);
6248
6249  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
6250  if (code == NOP_EXPR
6251      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6252    return;
6253
6254  /* First check the type of the overall operation.   We need only look at
6255     unary, binary and relational operations.  */
6256  if (TREE_CODE_CLASS (code) == '1'
6257      || TREE_CODE_CLASS (code) == '2'
6258      || TREE_CODE_CLASS (code) == '<')
6259    {
6260      mode = TYPE_MODE (TREE_TYPE (exp));
6261      if (GET_MODE_CLASS (mode) == MODE_INT
6262	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6263	internal_error ("unsupported wide integer operation");
6264    }
6265
6266  /* Check operand of a unary op.  */
6267  if (TREE_CODE_CLASS (code) == '1')
6268    {
6269      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6270      if (GET_MODE_CLASS (mode) == MODE_INT
6271	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6272	internal_error ("unsupported wide integer operation");
6273    }
6274
6275  /* Check operands of a binary/comparison op.  */
6276  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6277    {
6278      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6279      if (GET_MODE_CLASS (mode) == MODE_INT
6280	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6281	internal_error ("unsupported wide integer operation");
6282
6283      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6284      if (GET_MODE_CLASS (mode) == MODE_INT
6285	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6286	internal_error ("unsupported wide integer operation");
6287    }
6288}
6289#endif
6290
6291/* Return the highest power of two that EXP is known to be a multiple of.
6292   This is used in updating alignment of MEMs in array references.  */
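/* Informal example (not from the original sources): for the expression
   i * 12 + 8 this returns 4: the MULT_EXPR contributes 1 * 4 (the factor
   of `i' is unknown, the factor of 12 is 4) and the PLUS_EXPR takes the
   minimum of that and the factor of 8.  */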
6293
6294static HOST_WIDE_INT
6295highest_pow2_factor (exp)
6296     tree exp;
6297{
6298  HOST_WIDE_INT c0, c1;
6299
6300  switch (TREE_CODE (exp))
6301    {
6302    case INTEGER_CST:
6303      /* We can find the lowest bit that's a one.  If the low
6304	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6305	 We need to handle this case since we can find it in a COND_EXPR,
6306	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6307	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6308	 later ICE.  */
6309      if (TREE_CONSTANT_OVERFLOW (exp))
6310	return BIGGEST_ALIGNMENT;
6311      else
6312	{
6313	  /* Note: tree_low_cst is intentionally not used here,
6314	     we don't care about the upper bits.  */
6315	  c0 = TREE_INT_CST_LOW (exp);
6316	  c0 &= -c0;
6317	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6318	}
6319      break;
6320
6321    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6322      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6323      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6324      return MIN (c0, c1);
6325
6326    case MULT_EXPR:
6327      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6328      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6329      return c0 * c1;
6330
6331    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6332    case CEIL_DIV_EXPR:
6333      if (integer_pow2p (TREE_OPERAND (exp, 1))
6334	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6335	{
6336	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6337	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6338	  return MAX (1, c0 / c1);
6339	}
6340      break;
6341
6342    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6343    case SAVE_EXPR: case WITH_RECORD_EXPR:
6344      return highest_pow2_factor (TREE_OPERAND (exp, 0));
6345
6346    case COMPOUND_EXPR:
6347      return highest_pow2_factor (TREE_OPERAND (exp, 1));
6348
6349    case COND_EXPR:
6350      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6351      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6352      return MIN (c0, c1);
6353
6354    default:
6355      break;
6356    }
6357
6358  return 1;
6359}
6360
6361/* Similar, except that it is known that the expression must be a multiple
6362   of the alignment of TYPE.  */
6363
6364static HOST_WIDE_INT
6365highest_pow2_factor_for_type (type, exp)
6366     tree type;
6367     tree exp;
6368{
6369  HOST_WIDE_INT type_align, factor;
6370
6371  factor = highest_pow2_factor (exp);
6372  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6373  return MAX (factor, type_align);
6374}
6375
6376/* Return an object on the placeholder list that matches EXP, a
6377   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
6378   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
6379   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
6380   is a location which initially points to a starting location in the
6381   placeholder list (zero means start of the list) and where a pointer into
6382   the placeholder list at which the object is found is placed.  */
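/* Informal note (not from the original sources): PLACEHOLDER_EXPRs arise
   in self-referential types, for example an Ada record whose array
   component has bounds that depend on a discriminant of the record; the
   placeholder stands for "the object being accessed" and is replaced here
   by an object of matching type from the placeholder list.  */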
6383
6384tree
6385find_placeholder (exp, plist)
6386     tree exp;
6387     tree *plist;
6388{
6389  tree type = TREE_TYPE (exp);
6390  tree placeholder_expr;
6391
6392  for (placeholder_expr
6393       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6394       placeholder_expr != 0;
6395       placeholder_expr = TREE_CHAIN (placeholder_expr))
6396    {
6397      tree need_type = TYPE_MAIN_VARIANT (type);
6398      tree elt;
6399
6400      /* Find the outermost reference that is of the type we want.  If none,
6401	 see if any object has a type that is a pointer to the type we
6402	 want.  */
6403      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6404	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6405		   || TREE_CODE (elt) == COND_EXPR)
6406		  ? TREE_OPERAND (elt, 1)
6407		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6408		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6409		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6410		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6411		  ? TREE_OPERAND (elt, 0) : 0))
6412	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6413	  {
6414	    if (plist)
6415	      *plist = placeholder_expr;
6416	    return elt;
6417	  }
6418
6419      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6420	   elt
6421	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6422	       || TREE_CODE (elt) == COND_EXPR)
6423	      ? TREE_OPERAND (elt, 1)
6424	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6425		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6426		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6427		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6428	      ? TREE_OPERAND (elt, 0) : 0))
6429	if (POINTER_TYPE_P (TREE_TYPE (elt))
6430	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6431		== need_type))
6432	  {
6433	    if (plist)
6434	      *plist = placeholder_expr;
6435	    return build1 (INDIRECT_REF, need_type, elt);
6436	  }
6437    }
6438
6439  return 0;
6440}
6441
6442/* expand_expr: generate code for computing expression EXP.
6443   An rtx for the computed value is returned.  The value is never null.
6444   In the case of a void EXP, const0_rtx is returned.
6445
6446   The value may be stored in TARGET if TARGET is nonzero.
6447   TARGET is just a suggestion; callers must assume that
6448   the rtx returned may not be the same as TARGET.
6449
6450   If TARGET is CONST0_RTX, it means that the value will be ignored.
6451
6452   If TMODE is not VOIDmode, it suggests generating the
6453   result in mode TMODE.  But this is done only when convenient.
6454   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6455   TMODE is just a suggestion; callers must assume that
6456   the rtx returned may not have mode TMODE.
6457
6458   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6459   probably will not be used.
6460
6461   If MODIFIER is EXPAND_SUM then when EXP is an addition
6462   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6463   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6464   products as above, or REG or MEM, or constant.
6465   Ordinarily in such cases we would output mul or add instructions
6466   and then return a pseudo reg containing the sum.
6467
6468   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6469   it also marks a label as absolutely required (it can't be dead).
6470   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6471   This is used for outputting expressions used in initializers.
6472
6473   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6474   with a constant address even if that address is not normally legitimate.
6475   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6476
6477   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6478   a call parameter.  Such targets require special care as we haven't yet
6479   marked TARGET so that it's safe from being trashed by libcalls.  We
6480   don't want to use TARGET for anything but the final result;
6481   intermediate values must go elsewhere.  Additionally, calls to
6482   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
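/* Informal example (not from the original sources): expanding the address
   arithmetic p + i * 4 with MODIFIER == EXPAND_SUM may simply return
   (plus (reg P) (mult (reg I) (const_int 4))) without emitting any add or
   multiply insns, so the caller can try to fold the whole expression into
   a memory address.  */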
6483
6484rtx
6485expand_expr (exp, target, tmode, modifier)
6486     tree exp;
6487     rtx target;
6488     enum machine_mode tmode;
6489     enum expand_modifier modifier;
6490{
6491  rtx op0, op1, temp;
6492  tree type = TREE_TYPE (exp);
6493  int unsignedp = TREE_UNSIGNED (type);
6494  enum machine_mode mode;
6495  enum tree_code code = TREE_CODE (exp);
6496  optab this_optab;
6497  rtx subtarget, original_target;
6498  int ignore;
6499  tree context;
6500
6501  /* Handle ERROR_MARK before anybody tries to access its type.  */
6502  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6503    {
6504      op0 = CONST0_RTX (tmode);
6505      if (op0 != 0)
6506	return op0;
6507      return const0_rtx;
6508    }
6509
6510  mode = TYPE_MODE (type);
6511  /* Use subtarget as the target for operand 0 of a binary operation.  */
6512  subtarget = get_subtarget (target);
6513  original_target = target;
6514  ignore = (target == const0_rtx
6515	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6516		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6517		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6518		&& TREE_CODE (type) == VOID_TYPE));
6519
6520  /* If we are going to ignore this result, we need only do something
6521     if there is a side-effect somewhere in the expression.  If there
6522     is, short-circuit the most common cases here.  Note that we must
6523     not call expand_expr with anything but const0_rtx in case this
6524     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6525
6526  if (ignore)
6527    {
6528      if (! TREE_SIDE_EFFECTS (exp))
6529	return const0_rtx;
6530
6531      /* Ensure we reference a volatile object even if the value is ignored, but
6532	 don't do this if all we are doing is taking its address.  */
6533      if (TREE_THIS_VOLATILE (exp)
6534	  && TREE_CODE (exp) != FUNCTION_DECL
6535	  && mode != VOIDmode && mode != BLKmode
6536	  && modifier != EXPAND_CONST_ADDRESS)
6537	{
6538	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6539	  if (GET_CODE (temp) == MEM)
6540	    temp = copy_to_reg (temp);
6541	  return const0_rtx;
6542	}
6543
6544      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6545	  || code == INDIRECT_REF || code == BUFFER_REF)
6546	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6547			    modifier);
6548
6549      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6550	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6551	{
6552	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6553	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6554	  return const0_rtx;
6555	}
6556      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6557	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6558	/* If the second operand has no side effects, just evaluate
6559	   the first.  */
6560	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6561			    modifier);
6562      else if (code == BIT_FIELD_REF)
6563	{
6564	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6565	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6566	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6567	  return const0_rtx;
6568	}
6569
6570      target = 0;
6571    }
6572
6573#ifdef MAX_INTEGER_COMPUTATION_MODE
6574  /* Only check stuff here if the mode we want is different from the mode
6575     of the expression; if it's the same, check_max_integer_computation_mode
6576     will handle it.  Do we really need to check this stuff at all?  */
6577
6578  if (target
6579      && GET_MODE (target) != mode
6580      && TREE_CODE (exp) != INTEGER_CST
6581      && TREE_CODE (exp) != PARM_DECL
6582      && TREE_CODE (exp) != ARRAY_REF
6583      && TREE_CODE (exp) != ARRAY_RANGE_REF
6584      && TREE_CODE (exp) != COMPONENT_REF
6585      && TREE_CODE (exp) != BIT_FIELD_REF
6586      && TREE_CODE (exp) != INDIRECT_REF
6587      && TREE_CODE (exp) != CALL_EXPR
6588      && TREE_CODE (exp) != VAR_DECL
6589      && TREE_CODE (exp) != RTL_EXPR)
6590    {
6591      enum machine_mode mode = GET_MODE (target);
6592
6593      if (GET_MODE_CLASS (mode) == MODE_INT
6594	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6595	internal_error ("unsupported wide integer operation");
6596    }
6597
6598  if (tmode != mode
6599      && TREE_CODE (exp) != INTEGER_CST
6600      && TREE_CODE (exp) != PARM_DECL
6601      && TREE_CODE (exp) != ARRAY_REF
6602      && TREE_CODE (exp) != ARRAY_RANGE_REF
6603      && TREE_CODE (exp) != COMPONENT_REF
6604      && TREE_CODE (exp) != BIT_FIELD_REF
6605      && TREE_CODE (exp) != INDIRECT_REF
6606      && TREE_CODE (exp) != VAR_DECL
6607      && TREE_CODE (exp) != CALL_EXPR
6608      && TREE_CODE (exp) != RTL_EXPR
6609      && GET_MODE_CLASS (tmode) == MODE_INT
6610      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6611    internal_error ("unsupported wide integer operation");
6612
6613  check_max_integer_computation_mode (exp);
6614#endif
6615
6616  /* If we will do cse, generate all results into pseudo registers
6617     since 1) that allows cse to find more things
6618     and 2) otherwise cse could produce an insn the machine
6619     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6620     MEM: storing directly into the MEM is much more likely to be efficient.  */
6621
6622  if (! cse_not_expected && mode != BLKmode && target
6623      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6624      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6625    target = 0;
6626
6627  switch (code)
6628    {
6629    case LABEL_DECL:
6630      {
6631	tree function = decl_function_context (exp);
6632	/* Handle using a label in a containing function.  */
6633	if (function != current_function_decl
6634	    && function != inline_function_decl && function != 0)
6635	  {
6636	    struct function *p = find_function_data (function);
6637	    p->expr->x_forced_labels
6638	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6639				   p->expr->x_forced_labels);
6640	  }
6641	else
6642	  {
6643	    if (modifier == EXPAND_INITIALIZER)
6644	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6645						 label_rtx (exp),
6646						 forced_labels);
6647	  }
6648
6649	temp = gen_rtx_MEM (FUNCTION_MODE,
6650			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6651	if (function != current_function_decl
6652	    && function != inline_function_decl && function != 0)
6653	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6654	return temp;
6655      }
6656
6657    case PARM_DECL:
6658      if (!DECL_RTL_SET_P (exp))
6659	{
6660	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6661	  return CONST0_RTX (mode);
6662	}
6663
6664      /* ... fall through ...  */
6665
6666    case VAR_DECL:
6667      /* If a static var's type was incomplete when the decl was written,
6668	 but the type is complete now, lay out the decl now.  */
6669      if (DECL_SIZE (exp) == 0
6670	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6671	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6672	layout_decl (exp, 0);
6673
6674      /* ... fall through ...  */
6675
6676    case FUNCTION_DECL:
6677    case RESULT_DECL:
6678      if (DECL_RTL (exp) == 0)
6679	abort ();
6680
6681      /* Ensure variable marked as used even if it doesn't go through
6682	 a parser.  If it hasn't been used yet, write out an external
6683	 definition.  */
6684      if (! TREE_USED (exp))
6685	{
6686	  assemble_external (exp);
6687	  TREE_USED (exp) = 1;
6688	}
6689
6690      /* Show we haven't gotten RTL for this yet.  */
6691      temp = 0;
6692
6693      /* Handle variables inherited from containing functions.  */
6694      context = decl_function_context (exp);
6695
6696      /* We treat inline_function_decl as an alias for the current function
6697	 because that is the inline function whose vars, types, etc.
6698	 are being merged into the current function.
6699	 See expand_inline_function.  */
6700
6701      if (context != 0 && context != current_function_decl
6702	  && context != inline_function_decl
6703	  /* If var is static, we don't need a static chain to access it.  */
6704	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6705		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6706	{
6707	  rtx addr;
6708
6709	  /* Mark as non-local and addressable.  */
6710	  DECL_NONLOCAL (exp) = 1;
6711	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6712	    abort ();
6713	  (*lang_hooks.mark_addressable) (exp);
6714	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6715	    abort ();
6716	  addr = XEXP (DECL_RTL (exp), 0);
6717	  if (GET_CODE (addr) == MEM)
6718	    addr
6719	      = replace_equiv_address (addr,
6720				       fix_lexical_addr (XEXP (addr, 0), exp));
6721	  else
6722	    addr = fix_lexical_addr (addr, exp);
6723
6724	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6725	}
6726
6727      /* This is the case of an array whose size is to be determined
6728	 from its initializer, while the initializer is still being parsed.
6729	 See expand_decl.  */
6730
6731      else if (GET_CODE (DECL_RTL (exp)) == MEM
6732	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6733	temp = validize_mem (DECL_RTL (exp));
6734
6735      /* If DECL_RTL is memory, we are in the normal case and either
6736	 the address is not valid or it is not a register and -fforce-addr
6737	 is specified, get the address into a register.  */
6738
6739      else if (GET_CODE (DECL_RTL (exp)) == MEM
6740	       && modifier != EXPAND_CONST_ADDRESS
6741	       && modifier != EXPAND_SUM
6742	       && modifier != EXPAND_INITIALIZER
6743	       && (! memory_address_p (DECL_MODE (exp),
6744				       XEXP (DECL_RTL (exp), 0))
6745		   || (flag_force_addr
6746		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6747	temp = replace_equiv_address (DECL_RTL (exp),
6748				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6749
6750      /* If we got something, return it.  But first, set the alignment
6751	 if the address is a register.  */
6752      if (temp != 0)
6753	{
6754	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6755	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6756
6757	  return temp;
6758	}
6759
6760      /* If the mode of DECL_RTL does not match that of the decl, it
6761	 must be a promoted value.  We return a SUBREG of the wanted mode,
6762	 but mark it so that we know that it was already extended.  */
6763
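      /* For instance, on a target that promotes QImode variables into
         SImode registers, DECL_RTL is an SImode REG while DECL_MODE is
         QImode; the code below hands back a QImode SUBREG of that register
         with SUBREG_PROMOTED_VAR_P set, so callers know the value has
         already been extended.  */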
6764      if (GET_CODE (DECL_RTL (exp)) == REG
6765	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6766	{
6767	  /* Get the signedness used for this variable.  Ensure we get the
6768	     same mode we got when the variable was declared.  */
6769	  if (GET_MODE (DECL_RTL (exp))
6770	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6771			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6772	    abort ();
6773
6774	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6775	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6776	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6777	  return temp;
6778	}
6779
6780      return DECL_RTL (exp);
6781
6782    case INTEGER_CST:
6783      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6784				 TREE_INT_CST_HIGH (exp), mode);
6785
6786      /* ??? If overflow is set, fold will have done an incomplete job,
6787	 which can result in (plus xx (const_int 0)), which can get
6788	 simplified by validate_replace_rtx during virtual register
6789	 instantiation, which can result in unrecognizable insns.
6790	 Avoid this by forcing all overflows into registers.  */
6791      if (TREE_CONSTANT_OVERFLOW (exp)
6792	  && modifier != EXPAND_INITIALIZER)
6793	temp = force_reg (mode, temp);
6794
6795      return temp;
6796
6797    case VECTOR_CST:
6798      return const_vector_from_tree (exp);
6799
6800    case CONST_DECL:
6801      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6802
6803    case REAL_CST:
6804      /* If optimized, generate immediate CONST_DOUBLE
6805	 which will be turned into memory by reload if necessary.
6806
6807	 We used to force a register so that loop.c could see it.  But
6808	 this does not allow gen_* patterns to perform optimizations with
6809	 the constants.  It also produces two insns in cases like "x = 1.0;".
6810	 On most machines, floating-point constants are not permitted in
6811	 many insns, so we'd end up copying it to a register in any case.
6812
6813	 Now, we do the copying in expand_binop, if appropriate.  */
6814      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6815					   TYPE_MODE (TREE_TYPE (exp)));
6816
6817    case COMPLEX_CST:
6818    case STRING_CST:
6819      if (! TREE_CST_RTL (exp))
6820	output_constant_def (exp, 1);
6821
6822      /* TREE_CST_RTL probably contains a constant address.
6823	 On RISC machines where a constant address isn't valid,
6824	 make some insns to get that address into a register.  */
6825      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6826	  && modifier != EXPAND_CONST_ADDRESS
6827	  && modifier != EXPAND_INITIALIZER
6828	  && modifier != EXPAND_SUM
6829	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6830	      || (flag_force_addr
6831		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6832	return replace_equiv_address (TREE_CST_RTL (exp),
6833				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6834      return TREE_CST_RTL (exp);
6835
6836    case EXPR_WITH_FILE_LOCATION:
6837      {
6838	rtx to_return;
6839	const char *saved_input_filename = input_filename;
6840	int saved_lineno = lineno;
6841	input_filename = EXPR_WFL_FILENAME (exp);
6842	lineno = EXPR_WFL_LINENO (exp);
6843	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6844	  emit_line_note (input_filename, lineno);
6845	/* Possibly avoid switching back and forth here.  */
6846	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6847	input_filename = saved_input_filename;
6848	lineno = saved_lineno;
6849	return to_return;
6850      }
6851
6852    case SAVE_EXPR:
6853      context = decl_function_context (exp);
6854
6855      /* If this SAVE_EXPR was at global context, assume we are an
6856	 initialization function and move it into our context.  */
6857      if (context == 0)
6858	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6859
6860      /* We treat inline_function_decl as an alias for the current function
6861	 because that is the inline function whose vars, types, etc.
6862	 are being merged into the current function.
6863	 See expand_inline_function.  */
6864      if (context == current_function_decl || context == inline_function_decl)
6865	context = 0;
6866
6867      /* If this is non-local, handle it.  */
6868      if (context)
6869	{
6870	  /* The following call just exists to abort if the context is
6871	     not that of a containing function.  */
6872	  find_function_data (context);
6873
6874	  temp = SAVE_EXPR_RTL (exp);
6875	  if (temp && GET_CODE (temp) == REG)
6876	    {
6877	      put_var_into_stack (exp, /*rescan=*/true);
6878	      temp = SAVE_EXPR_RTL (exp);
6879	    }
6880	  if (temp == 0 || GET_CODE (temp) != MEM)
6881	    abort ();
6882	  return
6883	    replace_equiv_address (temp,
6884				   fix_lexical_addr (XEXP (temp, 0), exp));
6885	}
6886      if (SAVE_EXPR_RTL (exp) == 0)
6887	{
6888	  if (mode == VOIDmode)
6889	    temp = const0_rtx;
6890	  else
6891	    temp = assign_temp (build_qualified_type (type,
6892						      (TYPE_QUALS (type)
6893						       | TYPE_QUAL_CONST)),
6894				3, 0, 0);
6895
6896	  SAVE_EXPR_RTL (exp) = temp;
6897	  if (!optimize && GET_CODE (temp) == REG)
6898	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6899						save_expr_regs);
6900
6901	  /* If the mode of TEMP does not match that of the expression, it
6902	     must be a promoted value.  We pass store_expr a SUBREG of the
6903	     wanted mode but mark it so that we know that it was already
6904	     extended.  */
6905
6906	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6907	    {
6908	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6909	      promote_mode (type, mode, &unsignedp, 0);
6910	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6911	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6912	    }
6913
6914	  if (temp == const0_rtx)
6915	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6916	  else
6917	    store_expr (TREE_OPERAND (exp, 0), temp,
6918			modifier == EXPAND_STACK_PARM ? 2 : 0);
6919
6920	  TREE_USED (exp) = 1;
6921	}
6922
6923      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6924	 must be a promoted value.  We return a SUBREG of the wanted mode,
6925	 but mark it so that we know that it was already extended.  */
6926
6927      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6928	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6929	{
6930	  /* Compute the signedness and make the proper SUBREG.  */
6931	  promote_mode (type, mode, &unsignedp, 0);
6932	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6933	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6934	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6935	  return temp;
6936	}
6937
6938      return SAVE_EXPR_RTL (exp);
6939
6940    case UNSAVE_EXPR:
6941      {
6942	rtx temp;
6943	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6944	TREE_OPERAND (exp, 0)
6945	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6946	return temp;
6947      }
6948
6949    case PLACEHOLDER_EXPR:
6950      {
6951	tree old_list = placeholder_list;
6952	tree placeholder_expr = 0;
6953
6954	exp = find_placeholder (exp, &placeholder_expr);
6955	if (exp == 0)
6956	  abort ();
6957
6958	placeholder_list = TREE_CHAIN (placeholder_expr);
6959	temp = expand_expr (exp, original_target, tmode, modifier);
6960	placeholder_list = old_list;
6961	return temp;
6962      }
6963
6964    case WITH_RECORD_EXPR:
6965      /* Put the object on the placeholder list, expand our first operand,
6966	 and pop the list.  */
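      /* Roughly, any PLACEHOLDER_EXPR reached while expanding operand 0 is
         resolved against the record object in operand 1 through the
         placeholder_list chain (see the PLACEHOLDER_EXPR case above); this
         is how variable-sized fields, e.g. in Ada records, are measured
         relative to a particular object.  */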
6967      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6968				    placeholder_list);
6969      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6970			    modifier);
6971      placeholder_list = TREE_CHAIN (placeholder_list);
6972      return target;
6973
6974    case GOTO_EXPR:
6975      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6976	expand_goto (TREE_OPERAND (exp, 0));
6977      else
6978	expand_computed_goto (TREE_OPERAND (exp, 0));
6979      return const0_rtx;
6980
6981    case EXIT_EXPR:
6982      expand_exit_loop_if_false (NULL,
6983				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6984      return const0_rtx;
6985
6986    case LABELED_BLOCK_EXPR:
6987      if (LABELED_BLOCK_BODY (exp))
6988	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6989      /* Should perhaps use expand_label, but this is simpler and safer.  */
6990      do_pending_stack_adjust ();
6991      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6992      return const0_rtx;
6993
6994    case EXIT_BLOCK_EXPR:
6995      if (EXIT_BLOCK_RETURN (exp))
6996	sorry ("returned value in block_exit_expr");
6997      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6998      return const0_rtx;
6999
7000    case LOOP_EXPR:
7001      push_temp_slots ();
7002      expand_start_loop (1);
7003      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7004      expand_end_loop ();
7005      pop_temp_slots ();
7006
7007      return const0_rtx;
7008
7009    case BIND_EXPR:
7010      {
7011	tree vars = TREE_OPERAND (exp, 0);
7012	int vars_need_expansion = 0;
7013
7014	/* Need to open a binding contour here because
7015	   if there are any cleanups they must be contained here.  */
7016	expand_start_bindings (2);
7017
7018	/* Mark the corresponding BLOCK for output in its proper place.  */
7019	if (TREE_OPERAND (exp, 2) != 0
7020	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
7021	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7022
7023	/* If VARS have not yet been expanded, expand them now.  */
7024	while (vars)
7025	  {
7026	    if (!DECL_RTL_SET_P (vars))
7027	      {
7028		vars_need_expansion = 1;
7029		expand_decl (vars);
7030	      }
7031	    expand_decl_init (vars);
7032	    vars = TREE_CHAIN (vars);
7033	  }
7034
7035	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7036
7037	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7038
7039	return temp;
7040      }
7041
7042    case RTL_EXPR:
7043      if (RTL_EXPR_SEQUENCE (exp))
7044	{
7045	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7046	    abort ();
7047	  emit_insn (RTL_EXPR_SEQUENCE (exp));
7048	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7049	}
7050      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7051      free_temps_for_rtl_expr (exp);
7052      return RTL_EXPR_RTL (exp);
7053
7054    case CONSTRUCTOR:
7055      /* If we don't need the result, just ensure we evaluate any
7056	 subexpressions.  */
7057      if (ignore)
7058	{
7059	  tree elt;
7060
7061	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7062	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7063
7064	  return const0_rtx;
7065	}
7066
7067      /* All elts simple constants => refer to a constant in memory.  But
7068	 if this is a non-BLKmode mode, let it store a field at a time
7069	 since that should make a CONST_INT or CONST_DOUBLE when we
7070	 fold.  Likewise, if we have a target we can use, it is best to
7071	 store directly into the target unless the type is large enough
7072	 that memcpy will be used.  If we are making an initializer and
7073	 all operands are constant, put it in memory as well.
7074
7075	FIXME: Avoid trying to fill vector constructors piece-meal.
7076	Output them with output_constant_def below unless we're sure
7077	they're zeros.  This should go away when vector initializers
7078	are treated like VECTOR_CST instead of arrays.
7079      */
7080      else if ((TREE_STATIC (exp)
7081		&& ((mode == BLKmode
7082		     && ! (target != 0 && safe_from_p (target, exp, 1)))
7083		    || TREE_ADDRESSABLE (exp)
7084		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7085			&& (! MOVE_BY_PIECES_P
7086			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7087			     TYPE_ALIGN (type)))
7088			&& ((TREE_CODE (type) == VECTOR_TYPE
7089			     && !is_zeros_p (exp))
7090			    || ! mostly_zeros_p (exp)))))
7091	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7092	{
7093	  rtx constructor = output_constant_def (exp, 1);
7094
7095	  if (modifier != EXPAND_CONST_ADDRESS
7096	      && modifier != EXPAND_INITIALIZER
7097	      && modifier != EXPAND_SUM)
7098	    constructor = validize_mem (constructor);
7099
7100	  return constructor;
7101	}
7102      else
7103	{
7104	  /* Handle calls that pass values in multiple non-contiguous
7105	     locations.  The Irix 6 ABI has examples of this.  */
7106	  if (target == 0 || ! safe_from_p (target, exp, 1)
7107	      || GET_CODE (target) == PARALLEL
7108	      || modifier == EXPAND_STACK_PARM)
7109	    target
7110	      = assign_temp (build_qualified_type (type,
7111						   (TYPE_QUALS (type)
7112						    | (TREE_READONLY (exp)
7113						       * TYPE_QUAL_CONST))),
7114			     0, TREE_ADDRESSABLE (exp), 1);
7115
7116	  store_constructor (exp, target, 0, int_expr_size (exp));
7117	  return target;
7118	}
7119
7120    case INDIRECT_REF:
7121      {
7122	tree exp1 = TREE_OPERAND (exp, 0);
7123	tree index;
7124	tree string = string_constant (exp1, &index);
7125
7126	/* Try to optimize reads from const strings.  */
7127	if (string
7128	    && TREE_CODE (string) == STRING_CST
7129	    && TREE_CODE (index) == INTEGER_CST
7130	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7131	    && GET_MODE_CLASS (mode) == MODE_INT
7132	    && GET_MODE_SIZE (mode) == 1
7133	    && modifier != EXPAND_WRITE)
7134	  return gen_int_mode (TREE_STRING_POINTER (string)
7135			       [TREE_INT_CST_LOW (index)], mode);
7136
7137	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7138	op0 = memory_address (mode, op0);
7139	temp = gen_rtx_MEM (mode, op0);
7140	set_mem_attributes (temp, exp, 0);
7141
7142	/* If we are writing to this object and its type is a record with
7143	   readonly fields, we must mark it as readonly so it will
7144	   conflict with readonly references to those fields.  */
7145	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7146	  RTX_UNCHANGING_P (temp) = 1;
7147
7148	return temp;
7149      }
7150
7151    case ARRAY_REF:
7152      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7153	abort ();
7154
7155      {
7156	tree array = TREE_OPERAND (exp, 0);
7157	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7158	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7159	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7160	HOST_WIDE_INT i;
7161
7162	/* Optimize the special-case of a zero lower bound.
7163
7164	   We convert the low_bound to sizetype to avoid some problems
7165	   with constant folding.  (E.g. suppose the lower bound is 1,
7166	   and its mode is QI.  Without the conversion,  (ARRAY
7167	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7168	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7169
7170	if (! integer_zerop (low_bound))
7171	  index = size_diffop (index, convert (sizetype, low_bound));
7172
7173	/* Fold an expression like: "foo"[2].
7174	   This is not done in fold so it won't happen inside &.
7175	   Don't fold if this is for wide characters since it's too
7176	   difficult to do correctly and this is a very rare case.  */
7177
7178	if (modifier != EXPAND_CONST_ADDRESS
7179	    && modifier != EXPAND_INITIALIZER
7180	    && modifier != EXPAND_MEMORY
7181	    && TREE_CODE (array) == STRING_CST
7182	    && TREE_CODE (index) == INTEGER_CST
7183	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7184	    && GET_MODE_CLASS (mode) == MODE_INT
7185	    && GET_MODE_SIZE (mode) == 1)
7186	  return gen_int_mode (TREE_STRING_POINTER (array)
7187			       [TREE_INT_CST_LOW (index)], mode);
7188
7189	/* If this is a constant index into a constant array,
7190	   just get the value from the array.  Handle both the cases when
7191	   we have an explicit constructor and when our operand is a variable
7192	   that was declared const.  */
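	/* As a sketch: given "static const int primes[4] = {2, 3, 5, 7};",
	   the reference primes[2] can typically be folded here (under -O)
	   straight to the constant 5 by walking the CONSTRUCTOR found in
	   DECL_INITIAL, so no memory reference is emitted.  */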
7193
7194	if (modifier != EXPAND_CONST_ADDRESS
7195	    && modifier != EXPAND_INITIALIZER
7196	    && modifier != EXPAND_MEMORY
7197	    && TREE_CODE (array) == CONSTRUCTOR
7198	    && ! TREE_SIDE_EFFECTS (array)
7199	    && TREE_CODE (index) == INTEGER_CST
7200	    && 0 > compare_tree_int (index,
7201				     list_length (CONSTRUCTOR_ELTS
7202						  (TREE_OPERAND (exp, 0)))))
7203	  {
7204	    tree elem;
7205
7206	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7207		 i = TREE_INT_CST_LOW (index);
7208		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7209	      ;
7210
7211	    if (elem)
7212	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7213				  modifier);
7214	  }
7215
7216	else if (optimize >= 1
7217		 && modifier != EXPAND_CONST_ADDRESS
7218		 && modifier != EXPAND_INITIALIZER
7219		 && modifier != EXPAND_MEMORY
7220		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7221		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7222		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7223	  {
7224	    if (TREE_CODE (index) == INTEGER_CST)
7225	      {
7226		tree init = DECL_INITIAL (array);
7227
7228		if (TREE_CODE (init) == CONSTRUCTOR)
7229		  {
7230		    tree elem;
7231
7232		    for (elem = CONSTRUCTOR_ELTS (init);
7233			 (elem
7234			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7235			 elem = TREE_CHAIN (elem))
7236		      ;
7237
7238		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7239		      return expand_expr (fold (TREE_VALUE (elem)), target,
7240					  tmode, modifier);
7241		  }
7242		else if (TREE_CODE (init) == STRING_CST
7243			 && 0 > compare_tree_int (index,
7244						  TREE_STRING_LENGTH (init)))
7245		  {
7246		    tree type = TREE_TYPE (TREE_TYPE (init));
7247		    enum machine_mode mode = TYPE_MODE (type);
7248
7249		    if (GET_MODE_CLASS (mode) == MODE_INT
7250			&& GET_MODE_SIZE (mode) == 1)
7251		      return gen_int_mode (TREE_STRING_POINTER (init)
7252					   [TREE_INT_CST_LOW (index)], mode);
7253		  }
7254	      }
7255	  }
7256      }
7257      /* Fall through.  */
7258
7259    case COMPONENT_REF:
7260    case BIT_FIELD_REF:
7261    case ARRAY_RANGE_REF:
7262      /* If the operand is a CONSTRUCTOR, we can just extract the
7263	 appropriate field if it is present.  Don't do this if we have
7264	 already written the data since we want to refer to that copy
7265	 and varasm.c assumes that's what we'll do.  */
7266      if (code == COMPONENT_REF
7267	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7268	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7269	{
7270	  tree elt;
7271
7272	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7273	       elt = TREE_CHAIN (elt))
7274	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7275		/* We can normally use the value of the field in the
7276		   CONSTRUCTOR.  However, if this is a bitfield in
7277		   an integral mode that we can fit in a HOST_WIDE_INT,
7278		   we must mask only the number of bits in the bitfield,
7279		   since this is done implicitly by the constructor.  If
7280		   the bitfield does not meet either of those conditions,
7281		   we can't do this optimization.  */
7282		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7283		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7284			 == MODE_INT)
7285			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7286			    <= HOST_BITS_PER_WIDE_INT))))
7287	      {
7288		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7289		    && modifier == EXPAND_STACK_PARM)
7290		  target = 0;
7291		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7292		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7293		  {
7294		    HOST_WIDE_INT bitsize
7295		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7296		    enum machine_mode imode
7297		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7298
7299		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7300		      {
7301			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7302			op0 = expand_and (imode, op0, op1, target);
7303		      }
7304		    else
7305		      {
7306			tree count
7307			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7308					 0);
7309
7310			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7311					    target, 0);
7312			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7313					    target, 0);
7314		      }
7315		  }
7316
7317		return op0;
7318	      }
7319	}
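      /* To sketch the masking above: an unsigned 3-bit field pulled out of
         the CONSTRUCTOR is ANDed with (1 << 3) - 1 == 7, while a signed
         3-bit field is shifted left by GET_MODE_BITSIZE (imode) - 3 and
         arithmetic-shifted back, so the stray high-order bits are either
         cleared or turned into a sign extension.  */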
7320
7321      {
7322	enum machine_mode mode1;
7323	HOST_WIDE_INT bitsize, bitpos;
7324	tree offset;
7325	int volatilep = 0;
7326	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7327					&mode1, &unsignedp, &volatilep);
7328	rtx orig_op0;
7329
7330	/* If we got back the original object, something is wrong.  Perhaps
7331	   we are evaluating an expression too early.  In any event, don't
7332	   infinitely recurse.  */
7333	if (tem == exp)
7334	  abort ();
7335
7336	/* If TEM's type is a union of variable size, pass TARGET to the inner
7337	   computation, since it will need a temporary and TARGET is known
7338	   to be usable as one.  This occurs in unchecked conversion in Ada.  */
7339
7340	orig_op0 = op0
7341	  = expand_expr (tem,
7342			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7343			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7344			      != INTEGER_CST)
7345			  && modifier != EXPAND_STACK_PARM
7346			  ? target : NULL_RTX),
7347			 VOIDmode,
7348			 (modifier == EXPAND_INITIALIZER
7349			  || modifier == EXPAND_CONST_ADDRESS
7350			  || modifier == EXPAND_STACK_PARM)
7351			 ? modifier : EXPAND_NORMAL);
7352
7353	/* If this is a constant, put it into a register if it is a
7354	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
7355	if (CONSTANT_P (op0))
7356	  {
7357	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7358	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7359		&& offset == 0)
7360	      op0 = force_reg (mode, op0);
7361	    else
7362	      op0 = validize_mem (force_const_mem (mode, op0));
7363	  }
7364
7365	if (offset != 0)
7366	  {
7367	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7368					  EXPAND_SUM);
7369
7370	    /* If this object is in a register, put it into memory.
7371	       This case can't occur in C, but can in Ada if we have
7372	       unchecked conversion of an expression from a scalar type to
7373	       an array or record type.  */
7374	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7375		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7376	      {
7377		/* If the operand is a SAVE_EXPR, we can deal with this by
7378		   forcing the SAVE_EXPR into memory.  */
7379		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7380		  {
7381		    put_var_into_stack (TREE_OPERAND (exp, 0),
7382					/*rescan=*/true);
7383		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7384		  }
7385		else
7386		  {
7387		    tree nt
7388		      = build_qualified_type (TREE_TYPE (tem),
7389					      (TYPE_QUALS (TREE_TYPE (tem))
7390					       | TYPE_QUAL_CONST));
7391		    rtx memloc = assign_temp (nt, 1, 1, 1);
7392
7393		    emit_move_insn (memloc, op0);
7394		    op0 = memloc;
7395		  }
7396	      }
7397
7398	    if (GET_CODE (op0) != MEM)
7399	      abort ();
7400
7401#ifdef POINTERS_EXTEND_UNSIGNED
7402	    if (GET_MODE (offset_rtx) != Pmode)
7403	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7404#else
7405	    if (GET_MODE (offset_rtx) != ptr_mode)
7406	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7407#endif
7408
7409	    /* A constant address in OP0 can have VOIDmode; we must not try
7410	       to call force_reg in that case, so avoid it.  */
7411	    if (GET_CODE (op0) == MEM
7412		&& GET_MODE (op0) == BLKmode
7413		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
7414		&& bitsize != 0
7415		&& (bitpos % bitsize) == 0
7416		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7417		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7418	      {
7419		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7420		bitpos = 0;
7421	      }
7422
7423	    op0 = offset_address (op0, offset_rtx,
7424				  highest_pow2_factor (offset));
7425	  }
7426
7427	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7428	   record its alignment as BIGGEST_ALIGNMENT.  */
7429	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7430	    && is_aligning_offset (offset, tem))
7431	  set_mem_align (op0, BIGGEST_ALIGNMENT);
7432
7433	/* Don't forget about volatility even if this is a bitfield.  */
7434	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7435	  {
7436	    if (op0 == orig_op0)
7437	      op0 = copy_rtx (op0);
7438
7439	    MEM_VOLATILE_P (op0) = 1;
7440	  }
7441
7442	/* The following code doesn't handle CONCAT.
7443	   Assume only bitpos == 0 can be used for CONCAT, due to
7444	   one-element arrays having the same mode as their element.  */
7445	if (GET_CODE (op0) == CONCAT)
7446	  {
7447	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7448	      abort ();
7449	    return op0;
7450	  }
7451
7452	/* In cases where an aligned union has an unaligned object
7453	   as a field, we might be extracting a BLKmode value from
7454	   an integer-mode (e.g., SImode) object.  Handle this case
7455	   by doing the extract into an object as wide as the field
7456	   (which we know to be the width of a basic mode), then
7457	   storing into memory, and changing the mode to BLKmode.  */
7458	if (mode1 == VOIDmode
7459	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7460	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7461		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7462		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7463		&& modifier != EXPAND_CONST_ADDRESS
7464		&& modifier != EXPAND_INITIALIZER)
7465	    /* If the field isn't aligned enough to fetch as a memref,
7466	       fetch it as a bit field.  */
7467	    || (mode1 != BLKmode
7468		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7469		&& ((TYPE_ALIGN (TREE_TYPE (tem))
7470		     < GET_MODE_ALIGNMENT (mode))
7471		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7472	    /* If the type and the field are a constant size and the
7473	       size of the type isn't the same size as the bitfield,
7474	       we must use bitfield operations.  */
7475	    || (bitsize >= 0
7476		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7477		    == INTEGER_CST)
7478		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7479					  bitsize)))
7480	  {
7481	    enum machine_mode ext_mode = mode;
7482
7483	    if (ext_mode == BLKmode
7484		&& ! (target != 0 && GET_CODE (op0) == MEM
7485		      && GET_CODE (target) == MEM
7486		      && bitpos % BITS_PER_UNIT == 0))
7487	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7488
7489	    if (ext_mode == BLKmode)
7490	      {
7491		/* In this case, BITPOS must start at a byte boundary and
7492		   TARGET, if specified, must be a MEM.  */
7493		if (GET_CODE (op0) != MEM
7494		    || (target != 0 && GET_CODE (target) != MEM)
7495		    || bitpos % BITS_PER_UNIT != 0)
7496		  abort ();
7497
7498		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7499		if (target == 0)
7500		  target = assign_temp (type, 0, 1, 1);
7501
7502		emit_block_move (target, op0,
7503				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7504					  / BITS_PER_UNIT),
7505				 (modifier == EXPAND_STACK_PARM
7506				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7507
7508		return target;
7509	      }
7510
7511	    op0 = validize_mem (op0);
7512
7513	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7514	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7515
7516	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7517				     (modifier == EXPAND_STACK_PARM
7518				      ? NULL_RTX : target),
7519				     ext_mode, ext_mode,
7520				     int_size_in_bytes (TREE_TYPE (tem)));
7521
7522	    /* If the result is a record type and BITSIZE is narrower than
7523	       the mode of OP0, an integral mode, and this is a big endian
7524	       machine, we must put the field into the high-order bits.  */
7525	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7526		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7527		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7528	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7529				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7530					    - bitsize),
7531				  op0, 1);
7532
7533	    if (mode == BLKmode)
7534	      {
7535		rtx new = assign_temp (build_qualified_type
7536				       ((*lang_hooks.types.type_for_mode)
7537					(ext_mode, 0),
7538					TYPE_QUAL_CONST), 0, 1, 1);
7539
7540		emit_move_insn (new, op0);
7541		op0 = copy_rtx (new);
7542		PUT_MODE (op0, BLKmode);
7543		set_mem_attributes (op0, exp, 1);
7544	      }
7545
7546	    return op0;
7547	  }
7548
7549	/* If the result is BLKmode, use that to access the object
7550	   now as well.  */
7551	if (mode == BLKmode)
7552	  mode1 = BLKmode;
7553
7554	/* Get a reference to just this component.  */
7555	if (modifier == EXPAND_CONST_ADDRESS
7556	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7557	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7558	else
7559	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7560
7561	if (op0 == orig_op0)
7562	  op0 = copy_rtx (op0);
7563
7564	set_mem_attributes (op0, exp, 0);
7565	if (GET_CODE (XEXP (op0, 0)) == REG)
7566	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7567
7568	MEM_VOLATILE_P (op0) |= volatilep;
7569	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7570	    || modifier == EXPAND_CONST_ADDRESS
7571	    || modifier == EXPAND_INITIALIZER)
7572	  return op0;
7573	else if (target == 0)
7574	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7575
7576	convert_move (target, op0, unsignedp);
7577	return target;
7578      }
7579
7580    case VTABLE_REF:
7581      {
7582	rtx insn, before = get_last_insn (), vtbl_ref;
7583
7584	/* Evaluate the interior expression.  */
7585	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7586				 tmode, modifier);
7587
7588	/* Get or create an instruction off which to hang a note.  */
7589	if (REG_P (subtarget))
7590	  {
7591	    target = subtarget;
7592	    insn = get_last_insn ();
7593	    if (insn == before)
7594	      abort ();
7595	    if (! INSN_P (insn))
7596	      insn = prev_nonnote_insn (insn);
7597	  }
7598	else
7599	  {
7600	    target = gen_reg_rtx (GET_MODE (subtarget));
7601	    insn = emit_move_insn (target, subtarget);
7602	  }
7603
7604	/* Collect the data for the note.  */
7605	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7606	vtbl_ref = plus_constant (vtbl_ref,
7607				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7608	/* Discard the initial CONST that was added.  */
7609	vtbl_ref = XEXP (vtbl_ref, 0);
7610
7611	REG_NOTES (insn)
7612	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7613
7614	return target;
7615      }
7616
7617      /* Intended for a reference to a buffer of a file-object in Pascal.
7618	 But it's not certain that a special tree code will really be
7619	 necessary for these.  INDIRECT_REF might work for them.  */
7620    case BUFFER_REF:
7621      abort ();
7622
7623    case IN_EXPR:
7624      {
7625	/* Pascal set IN expression.
7626
7627	   Algorithm:
7628	       rlo       = set_low - (set_low%bits_per_word);
7629	       the_word  = set [ (index - rlo)/bits_per_word ];
7630	       bit_index = index % bits_per_word;
7631	       bitmask   = 1 << bit_index;
7632	       return !!(the_word & bitmask);  */
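	/* As a worked instance of the algorithm above, with bits_per_word
	   == 8, set_low == 3 and index == 10: rlo = 3 - (3 % 8) = 0,
	   the_word = set[(10 - 0) / 8] = set[1], bit_index = 10 % 8 = 2 and
	   bitmask = 1 << 2 = 4, so the result is bit 2 of the second word
	   of the set.  */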
7633
7634	tree set = TREE_OPERAND (exp, 0);
7635	tree index = TREE_OPERAND (exp, 1);
7636	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7637	tree set_type = TREE_TYPE (set);
7638	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7639	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7640	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7641	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7642	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7643	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7644	rtx setaddr = XEXP (setval, 0);
7645	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7646	rtx rlow;
7647	rtx diff, quo, rem, addr, bit, result;
7648
7649	/* If domain is empty, answer is no.  Likewise if index is constant
7650	   and out of bounds.  */
7651	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7652	     && TREE_CODE (set_low_bound) == INTEGER_CST
7653	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7654	     || (TREE_CODE (index) == INTEGER_CST
7655		 && TREE_CODE (set_low_bound) == INTEGER_CST
7656		 && tree_int_cst_lt (index, set_low_bound))
7657	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7658		 && TREE_CODE (index) == INTEGER_CST
7659		 && tree_int_cst_lt (set_high_bound, index))))
7660	  return const0_rtx;
7661
7662	if (target == 0)
7663	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7664
7665	/* If we get here, we have to generate the code for both cases
7666	   (in range and out of range).  */
7667
7668	op0 = gen_label_rtx ();
7669	op1 = gen_label_rtx ();
7670
7671	if (! (GET_CODE (index_val) == CONST_INT
7672	       && GET_CODE (lo_r) == CONST_INT))
7673	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7674				   GET_MODE (index_val), iunsignedp, op1);
7675
7676	if (! (GET_CODE (index_val) == CONST_INT
7677	       && GET_CODE (hi_r) == CONST_INT))
7678	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7679				   GET_MODE (index_val), iunsignedp, op1);
7680
7681	/* Calculate the element number of bit zero in the first word
7682	   of the set.  */
7683	if (GET_CODE (lo_r) == CONST_INT)
7684	  rlow = GEN_INT (INTVAL (lo_r)
7685			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7686	else
7687	  rlow = expand_binop (index_mode, and_optab, lo_r,
7688			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7689			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7690
7691	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7692			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7693
7694	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7695			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7696	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7697			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7698
7699	addr = memory_address (byte_mode,
7700			       expand_binop (index_mode, add_optab, diff,
7701					     setaddr, NULL_RTX, iunsignedp,
7702					     OPTAB_LIB_WIDEN));
7703
7704	/* Extract the bit we want to examine.  */
7705	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7706			    gen_rtx_MEM (byte_mode, addr),
7707			    make_tree (TREE_TYPE (index), rem),
7708			    NULL_RTX, 1);
7709	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7710			       GET_MODE (target) == byte_mode ? target : 0,
7711			       1, OPTAB_LIB_WIDEN);
7712
7713	if (result != target)
7714	  convert_move (target, result, 1);
7715
7716	/* Output the code to handle the out-of-range case.  */
7717	emit_jump (op0);
7718	emit_label (op1);
7719	emit_move_insn (target, const0_rtx);
7720	emit_label (op0);
7721	return target;
7722      }
7723
7724    case WITH_CLEANUP_EXPR:
7725      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7726	{
7727	  WITH_CLEANUP_EXPR_RTL (exp)
7728	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7729	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7730				  CLEANUP_EH_ONLY (exp));
7731
7732	  /* That's it for this cleanup.  */
7733	  TREE_OPERAND (exp, 1) = 0;
7734	}
7735      return WITH_CLEANUP_EXPR_RTL (exp);
7736
7737    case CLEANUP_POINT_EXPR:
7738      {
7739	/* Start a new binding layer that will keep track of all cleanup
7740	   actions to be performed.  */
7741	expand_start_bindings (2);
7742
7743	target_temp_slot_level = temp_slot_level;
7744
7745	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7746	/* If we're going to use this value, load it up now.  */
7747	if (! ignore)
7748	  op0 = force_not_mem (op0);
7749	preserve_temp_slots (op0);
7750	expand_end_bindings (NULL_TREE, 0, 0);
7751      }
7752      return op0;
7753
7754    case CALL_EXPR:
7755      /* Check for a built-in function.  */
7756      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7757	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7758	      == FUNCTION_DECL)
7759	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7760	{
7761	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7762	      == BUILT_IN_FRONTEND)
7763	    return (*lang_hooks.expand_expr) (exp, original_target,
7764					      tmode, modifier);
7765	  else
7766	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7767	}
7768
7769      return expand_call (exp, target, ignore);
7770
7771    case NON_LVALUE_EXPR:
7772    case NOP_EXPR:
7773    case CONVERT_EXPR:
7774    case REFERENCE_EXPR:
7775      if (TREE_OPERAND (exp, 0) == error_mark_node)
7776	return const0_rtx;
7777
7778      if (TREE_CODE (type) == UNION_TYPE)
7779	{
7780	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7781
7782	  /* If both input and output are BLKmode, this conversion isn't doing
7783	     anything except possibly changing memory attribute.  */
7784	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7785	    {
7786	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7787					modifier);
7788
7789	      result = copy_rtx (result);
7790	      set_mem_attributes (result, exp, 0);
7791	      return result;
7792	    }
7793
7794	  if (target == 0)
7795	    target = assign_temp (type, 0, 1, 1);
7796
7797	  if (GET_CODE (target) == MEM)
7798	    /* Store data into beginning of memory target.  */
7799	    store_expr (TREE_OPERAND (exp, 0),
7800			adjust_address (target, TYPE_MODE (valtype), 0),
7801			modifier == EXPAND_STACK_PARM ? 2 : 0);
7802
7803	  else if (GET_CODE (target) == REG)
7804	    /* Store this field into a union of the proper type.  */
7805	    store_field (target,
7806			 MIN ((int_size_in_bytes (TREE_TYPE
7807						  (TREE_OPERAND (exp, 0)))
7808			       * BITS_PER_UNIT),
7809			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7810			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7811			 VOIDmode, 0, type, 0);
7812	  else
7813	    abort ();
7814
7815	  /* Return the entire union.  */
7816	  return target;
7817	}
7818
7819      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7820	{
7821	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7822			     modifier);
7823
7824	  /* If the signedness of the conversion differs and OP0 is
7825	     a promoted SUBREG, clear that indication since we now
7826	     have to do the proper extension.  */
7827	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7828	      && GET_CODE (op0) == SUBREG)
7829	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7830
7831	  return op0;
7832	}
7833
7834      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7835      if (GET_MODE (op0) == mode)
7836	return op0;
7837
7838      /* If OP0 is a constant, just convert it into the proper mode.  */
7839      if (CONSTANT_P (op0))
7840	{
7841	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7842	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7843
7844	  if (modifier == EXPAND_INITIALIZER)
7845	    return simplify_gen_subreg (mode, op0, inner_mode,
7846					subreg_lowpart_offset (mode,
7847							       inner_mode));
7848	  else
7849	    return convert_modes (mode, inner_mode, op0,
7850				  TREE_UNSIGNED (inner_type));
7851	}
7852
7853      if (modifier == EXPAND_INITIALIZER)
7854	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7855
7856      if (target == 0)
7857	return
7858	  convert_to_mode (mode, op0,
7859			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7860      else
7861	convert_move (target, op0,
7862		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7863      return target;
7864
7865    case VIEW_CONVERT_EXPR:
7866      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7867
7868      /* If the input and output modes are both the same, we are done.
7869	 Otherwise, if neither mode is BLKmode and both are within a word, we
7870	 can use gen_lowpart.  If neither is true, make sure the operand is
7871	 in memory and convert the MEM to the new mode.  */
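      /* For instance, viewing a 32-bit float as a 32-bit integer can
         usually be done with gen_lowpart on the register, whereas a view
         involving BLKmode goes through memory: the operand is spilled to a
         stack temporary if it is not already a MEM, and the MEM is then
         accessed in the new mode.  */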
7872      if (TYPE_MODE (type) == GET_MODE (op0))
7873	;
7874      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7875	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7876	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7877	op0 = gen_lowpart (TYPE_MODE (type), op0);
7878      else if (GET_CODE (op0) != MEM)
7879	{
7880	  /* If the operand is not a MEM, force it into memory.  Since we
7881	     are going to be changing the mode of the MEM, don't call
7882	     force_const_mem for constants because we don't allow pool
7883	     constants to change mode.  */
7884	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7885
7886	  if (TREE_ADDRESSABLE (exp))
7887	    abort ();
7888
7889	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7890	    target
7891	      = assign_stack_temp_for_type
7892		(TYPE_MODE (inner_type),
7893		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7894
7895	  emit_move_insn (target, op0);
7896	  op0 = target;
7897	}
7898
7899      /* At this point, OP0 is in the correct mode.  If the output type is such
7900	 that the operand is known to be aligned, indicate that it is.
7901	 Otherwise, we need only be concerned about alignment for non-BLKmode
7902	 results.  */
7903      if (GET_CODE (op0) == MEM)
7904	{
7905	  op0 = copy_rtx (op0);
7906
7907	  if (TYPE_ALIGN_OK (type))
7908	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7909	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7910		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7911	    {
7912	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7913	      HOST_WIDE_INT temp_size
7914		= MAX (int_size_in_bytes (inner_type),
7915		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7916	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7917						    temp_size, 0, type);
7918	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7919
7920	      if (TREE_ADDRESSABLE (exp))
7921		abort ();
7922
7923	      if (GET_MODE (op0) == BLKmode)
7924		emit_block_move (new_with_op0_mode, op0,
7925				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7926				 (modifier == EXPAND_STACK_PARM
7927				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7928	      else
7929		emit_move_insn (new_with_op0_mode, op0);
7930
7931	      op0 = new;
7932	    }
7933
7934	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7935	}
7936
7937      return op0;
7938
7939    case PLUS_EXPR:
7940      this_optab = ! unsignedp && flag_trapv
7941                   && (GET_MODE_CLASS (mode) == MODE_INT)
7942                   ? addv_optab : add_optab;
7943
7944      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7945	 something else, make sure we add the register to the constant and
7946	 then to the other thing.  This case can occur during strength
7947	 reduction and doing it this way will produce better code if the
7948	 frame pointer or argument pointer is eliminated.
7949
7950	 fold-const.c will ensure that the constant is always in the inner
7951	 PLUS_EXPR, so the only case we need to do anything about is if
7952	 sp, ap, or fp is our second argument, in which case we must swap
7953	 the innermost first argument and our second argument.  */
7954
7955      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7956	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7957	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7958	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7959	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7960	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7961	{
7962	  tree t = TREE_OPERAND (exp, 1);
7963
7964	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7965	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7966	}
7967
7968      /* If the result is to be ptr_mode and we are adding an integer to
7969	 something, we might be forming a constant.  So try to use
7970	 plus_constant.  If it produces a sum and we can't accept it,
7971	 use force_operand.  This allows P = &ARR[const] to generate
7972	 efficient code on machines where a SYMBOL_REF is not a valid
7973	 address.
7974
7975	 If this is an EXPAND_SUM call, always return the sum.  */
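      /* E.g. for P = &ARR[3] with 4-byte elements, the address expands as
         (plus (symbol_ref ARR) (const_int 12)), which plus_constant folds
         into a single CONST, so no add insn is needed unless the target
         cannot accept that form of address.  */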
7976      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7977	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7978	{
7979	  if (modifier == EXPAND_STACK_PARM)
7980	    target = 0;
7981	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7982	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7983	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7984	    {
7985	      rtx constant_part;
7986
7987	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7988				 EXPAND_SUM);
7989	      /* Use immed_double_const to ensure that the constant is
7990		 truncated according to the mode of OP1, then sign extended
7991		 to a HOST_WIDE_INT.  Using the constant directly can result
7992		 in non-canonical RTL in a 64x32 cross compile.  */
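	      /* Concretely, on a 64-bit host targeting 32-bit SImode the
		 value 0x80000000 must become (const_int -2147483648),
		 i.e. sign-extended within the host wide int, to be
		 canonical RTL; immed_double_const performs exactly that
		 truncation and extension.  */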
7993	      constant_part
7994		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7995				      (HOST_WIDE_INT) 0,
7996				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7997	      op1 = plus_constant (op1, INTVAL (constant_part));
7998	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7999		op1 = force_operand (op1, target);
8000	      return op1;
8001	    }
8002
8003	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8004		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8005		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8006	    {
8007	      rtx constant_part;
8008
8009	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8010				 (modifier == EXPAND_INITIALIZER
8011				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8012	      if (! CONSTANT_P (op0))
8013		{
8014		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8015				     VOIDmode, modifier);
8016		  /* Don't go to both_summands if modifier
8017		     says it's not right to return a PLUS.  */
8018		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8019		    goto binop2;
8020		  goto both_summands;
8021		}
8022	      /* Use immed_double_const to ensure that the constant is
8023		 truncated according to the mode of OP1, then sign extended
8024		 to a HOST_WIDE_INT.  Using the constant directly can result
8025		 in non-canonical RTL in a 64x32 cross compile.  */
8026	      constant_part
8027		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8028				      (HOST_WIDE_INT) 0,
8029				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8030	      op0 = plus_constant (op0, INTVAL (constant_part));
8031	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8032		op0 = force_operand (op0, target);
8033	      return op0;
8034	    }
8035	}
8036
8037      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8038	subtarget = 0;
8039
8040      /* No sense saving up arithmetic to be done
8041	 if it's all in the wrong mode to form part of an address.
8042	 And force_operand won't know whether to sign-extend or
8043	 zero-extend.  */
8044      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8045	  || mode != ptr_mode)
8046	{
8047	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8048	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8049	  if (op0 == const0_rtx)
8050	    return op1;
8051	  if (op1 == const0_rtx)
8052	    return op0;
8053	  goto binop2;
8054	}
8055
8056      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8057      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8058
8059      /* We come here from MINUS_EXPR when the second operand is a
8060         constant.  */
8061    both_summands:
8062      /* Make sure any term that's a sum with a constant comes last.  */
8063      if (GET_CODE (op0) == PLUS
8064	  && CONSTANT_P (XEXP (op0, 1)))
8065	{
8066	  temp = op0;
8067	  op0 = op1;
8068	  op1 = temp;
8069	}
8070      /* If adding to a sum including a constant,
8071	 associate it to put the constant outside.  */
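      /* For example, (reg R) + (plus (reg S) (const_int 8)) is rewritten
         here as (plus (plus (reg S) (reg R)) (const_int 8)), keeping the
         constant on the outside where later address arithmetic can absorb
         it.  */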
8072      if (GET_CODE (op1) == PLUS
8073	  && CONSTANT_P (XEXP (op1, 1)))
8074	{
8075	  rtx constant_term = const0_rtx;
8076
8077	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8078	  if (temp != 0)
8079	    op0 = temp;
8080	  /* Ensure that MULT comes first if there is one.  */
8081	  else if (GET_CODE (op0) == MULT)
8082	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8083	  else
8084	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8085
8086	  /* Let's also eliminate constants from op0 if possible.  */
8087	  op0 = eliminate_constant_term (op0, &constant_term);
8088
8089	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8090	     their sum should be a constant.  Form it into OP1, since the
8091	     result we want will then be OP0 + OP1.  */
8092
8093	  temp = simplify_binary_operation (PLUS, mode, constant_term,
8094					    XEXP (op1, 1));
8095	  if (temp != 0)
8096	    op1 = temp;
8097	  else
8098	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8099	}
8100
8101      /* Put a constant term last and put a multiplication first.  */
8102      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8103	temp = op1, op1 = op0, op0 = temp;
8104
8105      temp = simplify_binary_operation (PLUS, mode, op0, op1);
8106      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8107
8108    case MINUS_EXPR:
8109      /* For initializers, we are allowed to return a MINUS of two
8110	 symbolic constants.  Here we handle all cases when both operands
8111	 are constant.  */
8112      /* Handle difference of two symbolic constants,
8113	 for the sake of an initializer.  */
8114      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8115	  && really_constant_p (TREE_OPERAND (exp, 0))
8116	  && really_constant_p (TREE_OPERAND (exp, 1)))
8117	{
8118	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8119				 modifier);
8120	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8121				 modifier);
8122
8123	  /* If the last operand is a CONST_INT, use plus_constant of
8124	     the negated constant.  Else make the MINUS.  */
8125	  if (GET_CODE (op1) == CONST_INT)
8126	    return plus_constant (op0, - INTVAL (op1));
8127	  else
8128	    return gen_rtx_MINUS (mode, op0, op1);
8129	}
8130
8131      this_optab = ! unsignedp && flag_trapv
8132                   && (GET_MODE_CLASS(mode) == MODE_INT)
8133                   ? subv_optab : sub_optab;
8134
8135      /* No sense saving up arithmetic to be done
8136	 if it's all in the wrong mode to form part of an address.
8137	 And force_operand won't know whether to sign-extend or
8138	 zero-extend.  */
8139      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8140	  || mode != ptr_mode)
8141	goto binop;
8142
8143      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8144	subtarget = 0;
8145
8146      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8147      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8148
8149      /* Convert A - const to A + (-const).  */
8150      if (GET_CODE (op1) == CONST_INT)
8151	{
8152	  op1 = negate_rtx (mode, op1);
8153	  goto both_summands;
8154	}
8155
8156      goto binop2;
8157
8158    case MULT_EXPR:
8159      /* If first operand is constant, swap them.
8160	 Thus the following special case checks need only
8161	 check the second operand.  */
8162      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8163	{
8164	  tree t1 = TREE_OPERAND (exp, 0);
8165	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8166	  TREE_OPERAND (exp, 1) = t1;
8167	}
8168
8169      /* Attempt to return something suitable for generating an
8170	 indexed address, for machines that support that.  */
8171
8172      if (modifier == EXPAND_SUM && mode == ptr_mode
8173	  && host_integerp (TREE_OPERAND (exp, 1), 0))
8174	{
8175	  tree exp1 = TREE_OPERAND (exp, 1);
8176
8177	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8178			     EXPAND_SUM);
8179
8180	  /* If we knew for certain that this is arithmetic for an array
8181	     reference, and we knew the bounds of the array, then we could
8182	     apply the distributive law across (PLUS X C) for constant C.
8183	     Without such knowledge, we risk overflowing the computation
8184	     when both X and C are large, but X+C isn't.  */
8185	  /* ??? Could perhaps special-case EXP being unsigned and C being
8186	     positive.  In that case we are certain that X+C is no smaller
8187	     than X and so the transformed expression will overflow iff the
8188	     original would have.  */
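	  /* E.g. with 32-bit arithmetic, X == 0x40000000 and C == -0x3ffffffc
	     make X + C small, so (X + C) * 4 is fine, but distributing it
	     would compute X * 4, which overflows.  */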
8189
8190	  if (GET_CODE (op0) != REG)
8191	    op0 = force_operand (op0, NULL_RTX);
8192	  if (GET_CODE (op0) != REG)
8193	    op0 = copy_to_mode_reg (mode, op0);
8194
8195	  return gen_rtx_MULT (mode, op0,
8196			       gen_int_mode (tree_low_cst (exp1, 0),
8197					     TYPE_MODE (TREE_TYPE (exp1))));
8198	}
8199
8200      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8201	subtarget = 0;
8202
8203      if (modifier == EXPAND_STACK_PARM)
8204	target = 0;
8205
8206      /* Check for multiplying things that have been extended
8207	 from a narrower type.  If this machine supports multiplying
8208	 in that narrower type with a result in the desired type,
8209	 do it that way, and avoid the explicit type-conversion.  */
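      /* E.g. on a machine with a 32x32->64 widening multiply insn,
	 "(long long) i * (long long) j" with int I and J can use that insn
	 directly, instead of extending both operands to 64 bits and doing
	 a full 64x64 multiply.  */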
8210      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8211	  && TREE_CODE (type) == INTEGER_TYPE
8212	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8213	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8214	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8215	       && int_fits_type_p (TREE_OPERAND (exp, 1),
8216				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8217	       /* Don't use a widening multiply if a shift will do.  */
8218	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8219		    > HOST_BITS_PER_WIDE_INT)
8220		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8221	      ||
8222	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8223	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8224		   ==
8225		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8226	       /* If both operands are extended, they must either both
8227		  be zero-extended or both be sign-extended.  */
8228	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8229		   ==
8230		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8231	{
8232	  enum machine_mode innermode
8233	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8234	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8235			? smul_widen_optab : umul_widen_optab);
8236	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8237			? umul_widen_optab : smul_widen_optab);
8238	  if (mode == GET_MODE_WIDER_MODE (innermode))
8239	    {
8240	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8241		{
8242		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8243				     NULL_RTX, VOIDmode, 0);
8244		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8245		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8246				       VOIDmode, 0);
8247		  else
8248		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8249				       NULL_RTX, VOIDmode, 0);
8250		  goto binop2;
8251		}
8252	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8253		       && innermode == word_mode)
8254		{
8255		  rtx htem;
8256		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8257				     NULL_RTX, VOIDmode, 0);
8258		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8259		    op1 = convert_modes (innermode, mode,
8260					 expand_expr (TREE_OPERAND (exp, 1),
8261						      NULL_RTX, VOIDmode, 0),
8262					 unsignedp);
8263		  else
8264		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8265				       NULL_RTX, VOIDmode, 0);
8266		  temp = expand_binop (mode, other_optab, op0, op1, target,
8267				       unsignedp, OPTAB_LIB_WIDEN);
8268		  htem = expand_mult_highpart_adjust (innermode,
8269						      gen_highpart (innermode, temp),
8270						      op0, op1,
8271						      gen_highpart (innermode, temp),
8272						      unsignedp);
8273		  emit_move_insn (gen_highpart (innermode, temp), htem);
8274		  return temp;
8275		}
8276	    }
8277	}
8278      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8279      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8280      return expand_mult (mode, op0, op1, target, unsignedp);
8281
8282    case TRUNC_DIV_EXPR:
8283    case FLOOR_DIV_EXPR:
8284    case CEIL_DIV_EXPR:
8285    case ROUND_DIV_EXPR:
8286    case EXACT_DIV_EXPR:
8287      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8288	subtarget = 0;
8289      if (modifier == EXPAND_STACK_PARM)
8290	target = 0;
8291      /* Possible optimization: compute the dividend with EXPAND_SUM;
8292	 then, if the divisor is constant, we can optimize the case
8293	 where some terms of the dividend have coefficients divisible by it.  */
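      /* E.g. (X * 8 + Y * 4 + Z) / 4 could then become X * 2 + Y + Z / 4,
	 at least when each term is known to be exactly divisible.  */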
8294      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8295      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8296      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8297
8298    case RDIV_EXPR:
8299	      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
8300	         saving an expensive divide.  If not, combine will rebuild the
8301	         original computation.  */
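	      /* E.g. "x / d; y / d; z / d" becomes three multiplications by
	         a single 1/d that CSE can share.  This is guarded by
	         flag_unsafe_math_optimizations because 1/d is generally not
	         exactly representable.  */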
8302      if (flag_unsafe_math_optimizations && optimize && !optimize_size
8303	  && TREE_CODE (type) == REAL_TYPE
8304	  && !real_onep (TREE_OPERAND (exp, 0)))
8305        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8306				   build (RDIV_EXPR, type,
8307					  build_real (type, dconst1),
8308					  TREE_OPERAND (exp, 1))),
8309			    target, tmode, modifier);
8310      this_optab = sdiv_optab;
8311      goto binop;
8312
8313    case TRUNC_MOD_EXPR:
8314    case FLOOR_MOD_EXPR:
8315    case CEIL_MOD_EXPR:
8316    case ROUND_MOD_EXPR:
8317      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8318	subtarget = 0;
8319      if (modifier == EXPAND_STACK_PARM)
8320	target = 0;
8321      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8322      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8323      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8324
8325    case FIX_ROUND_EXPR:
8326    case FIX_FLOOR_EXPR:
8327    case FIX_CEIL_EXPR:
8328      abort ();			/* Not used for C.  */
8329
8330    case FIX_TRUNC_EXPR:
8331      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8332      if (target == 0 || modifier == EXPAND_STACK_PARM)
8333	target = gen_reg_rtx (mode);
8334      expand_fix (target, op0, unsignedp);
8335      return target;
8336
8337    case FLOAT_EXPR:
8338      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8339      if (target == 0 || modifier == EXPAND_STACK_PARM)
8340	target = gen_reg_rtx (mode);
8341      /* expand_float can't figure out what to do if FROM has VOIDmode.
8342	 So give it the correct mode.  With -O, cse will optimize this.  */
8343      if (GET_MODE (op0) == VOIDmode)
8344	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8345				op0);
8346      expand_float (target, op0,
8347		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8348      return target;
8349
8350    case NEGATE_EXPR:
8351      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8352      if (modifier == EXPAND_STACK_PARM)
8353	target = 0;
8354      temp = expand_unop (mode,
8355			  ! unsignedp && flag_trapv
8356			  && (GET_MODE_CLASS(mode) == MODE_INT)
8357			  ? negv_optab : neg_optab, op0, target, 0);
8358      if (temp == 0)
8359	abort ();
8360      return temp;
8361
8362    case ABS_EXPR:
8363      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8364      if (modifier == EXPAND_STACK_PARM)
8365	target = 0;
8366
8367      /* Handle complex values specially.  */
8368      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8369	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8370	return expand_complex_abs (mode, op0, target, unsignedp);
8371
8372      /* Unsigned abs is simply the operand.  Testing here means we don't
8373	 risk generating incorrect code below.  */
8374      if (TREE_UNSIGNED (type))
8375	return op0;
8376
8377      return expand_abs (mode, op0, target, unsignedp,
8378			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8379
8380    case MAX_EXPR:
8381    case MIN_EXPR:
8382      target = original_target;
8383      if (target == 0
8384	  || modifier == EXPAND_STACK_PARM
8385	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8386	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8387	  || GET_MODE (target) != mode
8388	  || (GET_CODE (target) == REG
8389	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8390	target = gen_reg_rtx (mode);
8391      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8392      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8393
8394      /* First try to do it with a special MIN or MAX instruction.
8395	 If that does not win, use a conditional jump to select the proper
8396	 value.  */
8397      this_optab = (TREE_UNSIGNED (type)
8398		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
8399		    : (code == MIN_EXPR ? smin_optab : smax_optab));
8400
8401      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8402			   OPTAB_WIDEN);
8403      if (temp != 0)
8404	return temp;
8405
8406      /* At this point, a MEM target is no longer useful; we will get better
8407	 code without it.  */
8408
8409      if (GET_CODE (target) == MEM)
8410	target = gen_reg_rtx (mode);
8411
8412      if (target != op0)
8413	emit_move_insn (target, op0);
8414
8415      op0 = gen_label_rtx ();
8416
8417      /* If this mode is an integer too wide to compare properly,
8418	 compare word by word.  Rely on cse to optimize constant cases.  */
8419      if (GET_MODE_CLASS (mode) == MODE_INT
8420	  && ! can_compare_p (GE, mode, ccp_jump))
8421	{
8422	  if (code == MAX_EXPR)
8423	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8424					  target, op1, NULL_RTX, op0);
8425	  else
8426	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8427					  op1, target, NULL_RTX, op0);
8428	}
8429      else
8430	{
8431	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8432	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8433				   unsignedp, mode, NULL_RTX, NULL_RTX,
8434				   op0);
8435	}
8436      emit_move_insn (target, op1);
8437      emit_label (op0);
8438      return target;
8439
8440    case BIT_NOT_EXPR:
8441      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8442      if (modifier == EXPAND_STACK_PARM)
8443	target = 0;
8444      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8445      if (temp == 0)
8446	abort ();
8447      return temp;
8448
8449    case FFS_EXPR:
8450      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8451      if (modifier == EXPAND_STACK_PARM)
8452	target = 0;
8453      temp = expand_unop (mode, ffs_optab, op0, target, 1);
8454      if (temp == 0)
8455	abort ();
8456      return temp;
8457
8458      /* ??? Can optimize bitwise operations with one arg constant.
8459	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8460	 and (a bitwise1 b) bitwise2 b (etc)
8461	 but that is probably not worthwhile.  */
8462
8463      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8464	 boolean values when we want in all cases to compute both of them.  In
8465	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8466	 as actual zero-or-1 values and then bitwise anding.  In cases where
8467	 there cannot be any side effects, better code would be made by
8468	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8469	 how to recognize those cases.  */
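      /* E.g. a TRUTH_AND_EXPR of P and Q evaluates both operands to their
	 zero-or-one values and bitwise-ANDs them, whereas TRUTH_ANDIF_EXPR
	 would skip evaluating Q whenever P is false.  */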
8470
8471    case TRUTH_AND_EXPR:
8472    case BIT_AND_EXPR:
8473      this_optab = and_optab;
8474      goto binop;
8475
8476    case TRUTH_OR_EXPR:
8477    case BIT_IOR_EXPR:
8478      this_optab = ior_optab;
8479      goto binop;
8480
8481    case TRUTH_XOR_EXPR:
8482    case BIT_XOR_EXPR:
8483      this_optab = xor_optab;
8484      goto binop;
8485
8486    case LSHIFT_EXPR:
8487    case RSHIFT_EXPR:
8488    case LROTATE_EXPR:
8489    case RROTATE_EXPR:
8490      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8491	subtarget = 0;
8492      if (modifier == EXPAND_STACK_PARM)
8493	target = 0;
8494      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8495      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8496			   unsignedp);
8497
8498      /* Could determine the answer when only additive constants differ.  Also,
8499	 the addition of one can be handled by changing the condition.  */
8500    case LT_EXPR:
8501    case LE_EXPR:
8502    case GT_EXPR:
8503    case GE_EXPR:
8504    case EQ_EXPR:
8505    case NE_EXPR:
8506    case UNORDERED_EXPR:
8507    case ORDERED_EXPR:
8508    case UNLT_EXPR:
8509    case UNLE_EXPR:
8510    case UNGT_EXPR:
8511    case UNGE_EXPR:
8512    case UNEQ_EXPR:
8513      temp = do_store_flag (exp,
8514			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8515			    tmode != VOIDmode ? tmode : mode, 0);
8516      if (temp != 0)
8517	return temp;
8518
8519      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8520      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8521	  && original_target
8522	  && GET_CODE (original_target) == REG
8523	  && (GET_MODE (original_target)
8524	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8525	{
8526	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8527			      VOIDmode, 0);
8528
8529	  /* If temp is constant, we can just compute the result.  */
8530	  if (GET_CODE (temp) == CONST_INT)
8531	    {
8532	      if (INTVAL (temp) != 0)
8533	        emit_move_insn (target, const1_rtx);
8534	      else
8535	        emit_move_insn (target, const0_rtx);
8536
8537	      return target;
8538	    }
8539
8540	  if (temp != original_target)
8541	    {
8542	      enum machine_mode mode1 = GET_MODE (temp);
8543	      if (mode1 == VOIDmode)
8544		mode1 = tmode != VOIDmode ? tmode : mode;
8545
8546	      temp = copy_to_mode_reg (mode1, temp);
8547	    }
8548
8549	  op1 = gen_label_rtx ();
8550	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8551				   GET_MODE (temp), unsignedp, op1);
8552	  emit_move_insn (temp, const1_rtx);
8553	  emit_label (op1);
8554	  return temp;
8555	}
8556
8557      /* If no set-flag instruction, must generate a conditional
8558	 store into a temporary variable.  Drop through
8559	 and handle this like && and ||.  */
8560
8561    case TRUTH_ANDIF_EXPR:
8562    case TRUTH_ORIF_EXPR:
8563      if (! ignore
8564	  && (target == 0
8565	      || modifier == EXPAND_STACK_PARM
8566	      || ! safe_from_p (target, exp, 1)
8567	      /* Make sure we don't have a hard reg (such as function's return
8568		 value) live across basic blocks, if not optimizing.  */
8569	      || (!optimize && GET_CODE (target) == REG
8570		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8571	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8572
8573      if (target)
8574	emit_clr_insn (target);
8575
8576      op1 = gen_label_rtx ();
8577      jumpifnot (exp, op1);
8578
8579      if (target)
8580	emit_0_to_1_insn (target);
8581
8582      emit_label (op1);
8583      return ignore ? const0_rtx : target;
8584
8585    case TRUTH_NOT_EXPR:
8586      if (modifier == EXPAND_STACK_PARM)
8587	target = 0;
8588      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8589      /* The parser is careful to generate TRUTH_NOT_EXPR
8590	 only with operands that are always zero or one.  */
8591      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8592			   target, 1, OPTAB_LIB_WIDEN);
8593      if (temp == 0)
8594	abort ();
8595      return temp;
8596
8597    case COMPOUND_EXPR:
8598      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8599      emit_queue ();
8600      return expand_expr (TREE_OPERAND (exp, 1),
8601			  (ignore ? const0_rtx : target),
8602			  VOIDmode, modifier);
8603
8604    case COND_EXPR:
8605      /* If we would have a "singleton" (see below) were it not for a
8606	 conversion in each arm, bring that conversion back out.  */
8607      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8608	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8609	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8610	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8611	{
8612	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8613	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8614
8615	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8616	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8617	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8618		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8619	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8620		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8621	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8622		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8623	    return expand_expr (build1 (NOP_EXPR, type,
8624					build (COND_EXPR, TREE_TYPE (iftrue),
8625					       TREE_OPERAND (exp, 0),
8626					       iftrue, iffalse)),
8627				target, tmode, modifier);
8628	}
8629
8630      {
8631	/* Note that COND_EXPRs whose type is a structure or union
8632	   are required to be constructed to contain assignments of
8633	   a temporary variable, so that we can evaluate them here
8634	   for side effect only.  If type is void, we must do likewise.  */
8635
8636	/* If an arm of the branch requires a cleanup,
8637	   only that cleanup is performed.  */
8638
8639	tree singleton = 0;
8640	tree binary_op = 0, unary_op = 0;
8641
8642	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8643	   convert it to our mode, if necessary.  */
8644	if (integer_onep (TREE_OPERAND (exp, 1))
8645	    && integer_zerop (TREE_OPERAND (exp, 2))
8646	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8647	  {
8648	    if (ignore)
8649	      {
8650		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8651			     modifier);
8652		return const0_rtx;
8653	      }
8654
8655	    if (modifier == EXPAND_STACK_PARM)
8656	      target = 0;
8657	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8658	    if (GET_MODE (op0) == mode)
8659	      return op0;
8660
8661	    if (target == 0)
8662	      target = gen_reg_rtx (mode);
8663	    convert_move (target, op0, unsignedp);
8664	    return target;
8665	  }
8666
8667	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8668	   output and conditionally add B.  Similarly for unary operations.
8669	   Don't do this if X has side-effects because those side effects
8670	   might affect A or B and the "?" operation is a sequence point in
8671	   ANSI.  (operand_equal_p tests for side effects.)  */
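	/* E.g. for "x ? a + b : a" we can store A into the target
	   unconditionally and then add B only when X is true; SINGLETON
	   records the shared arm (A here) and BINARY_OP or UNARY_OP the
	   arm that does the extra work.  */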
8672
8673	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8674	    && operand_equal_p (TREE_OPERAND (exp, 2),
8675				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8676	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8677	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8678		 && operand_equal_p (TREE_OPERAND (exp, 1),
8679				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8680	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8681	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8682		 && operand_equal_p (TREE_OPERAND (exp, 2),
8683				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8684	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8685	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8686		 && operand_equal_p (TREE_OPERAND (exp, 1),
8687				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8688	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8689
8690	/* If we are not to produce a result, we have no target.  Otherwise,
8691	   if a target was specified use it; it will not be used as an
8692	   intermediate target unless it is safe.  If no target, use a
8693	   temporary.  */
8694
8695	if (ignore)
8696	  temp = 0;
8697	else if (modifier == EXPAND_STACK_PARM)
8698	  temp = assign_temp (type, 0, 0, 1);
8699	else if (original_target
8700		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8701		     || (singleton && GET_CODE (original_target) == REG
8702			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8703			 && original_target == var_rtx (singleton)))
8704		 && GET_MODE (original_target) == mode
8705#ifdef HAVE_conditional_move
8706		 && (! can_conditionally_move_p (mode)
8707		     || GET_CODE (original_target) == REG
8708		     || TREE_ADDRESSABLE (type))
8709#endif
8710		 && (GET_CODE (original_target) != MEM
8711		     || TREE_ADDRESSABLE (type)))
8712	  temp = original_target;
8713	else if (TREE_ADDRESSABLE (type))
8714	  abort ();
8715	else
8716	  temp = assign_temp (type, 0, 0, 1);
8717
8718	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8719	   do the test of X as a store-flag operation, do this as
8720	   A + ((X != 0) << log C).  Similarly for other simple binary
8721	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
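	/* E.g. "x ? a + 4 : a" becomes "a + ((x != 0) << 2)", trading the
	   conditional branch for a store-flag and a shift.  */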
8722	if (temp && singleton && binary_op
8723	    && (TREE_CODE (binary_op) == PLUS_EXPR
8724		|| TREE_CODE (binary_op) == MINUS_EXPR
8725		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8726		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8727	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8728		: integer_onep (TREE_OPERAND (binary_op, 1)))
8729	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8730	  {
8731	    rtx result;
8732	    tree cond;
8733	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8734			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8735			       ? addv_optab : add_optab)
8736			    : TREE_CODE (binary_op) == MINUS_EXPR
8737			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8738			       ? subv_optab : sub_optab)
8739			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8740			    : xor_optab);
8741
8742	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
8743	    if (singleton == TREE_OPERAND (exp, 1))
8744	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8745	    else
8746	      cond = TREE_OPERAND (exp, 0);
8747
8748	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8749					   ? temp : NULL_RTX),
8750				    mode, BRANCH_COST <= 1);
8751
8752	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8753	      result = expand_shift (LSHIFT_EXPR, mode, result,
8754				     build_int_2 (tree_log2
8755						  (TREE_OPERAND
8756						   (binary_op, 1)),
8757						  0),
8758				     (safe_from_p (temp, singleton, 1)
8759				      ? temp : NULL_RTX), 0);
8760
8761	    if (result)
8762	      {
8763		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8764		return expand_binop (mode, boptab, op1, result, temp,
8765				     unsignedp, OPTAB_LIB_WIDEN);
8766	      }
8767	  }
8768
8769	do_pending_stack_adjust ();
8770	NO_DEFER_POP;
8771	op0 = gen_label_rtx ();
8772
8773	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8774	  {
8775	    if (temp != 0)
8776	      {
8777		/* If the target conflicts with the other operand of the
8778		   binary op, we can't use it.  Also, we can't use the target
8779		   if it is a hard register, because evaluating the condition
8780		   might clobber it.  */
8781		if ((binary_op
8782		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8783		    || (GET_CODE (temp) == REG
8784			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8785		  temp = gen_reg_rtx (mode);
8786		store_expr (singleton, temp,
8787			    modifier == EXPAND_STACK_PARM ? 2 : 0);
8788	      }
8789	    else
8790	      expand_expr (singleton,
8791			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8792	    if (singleton == TREE_OPERAND (exp, 1))
8793	      jumpif (TREE_OPERAND (exp, 0), op0);
8794	    else
8795	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8796
8797	    start_cleanup_deferral ();
8798	    if (binary_op && temp == 0)
8799	      /* Just touch the other operand.  */
8800	      expand_expr (TREE_OPERAND (binary_op, 1),
8801			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8802	    else if (binary_op)
8803	      store_expr (build (TREE_CODE (binary_op), type,
8804				 make_tree (type, temp),
8805				 TREE_OPERAND (binary_op, 1)),
8806			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8807	    else
8808	      store_expr (build1 (TREE_CODE (unary_op), type,
8809				  make_tree (type, temp)),
8810			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8811	    op1 = op0;
8812	  }
8813	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8814	   comparison operator.  If we have one of these cases, set the
8815	   output to A, branch on A (cse will merge these two references),
8816	   then set the output to FOO.  */
8817	else if (temp
8818		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8819		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8820		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8821				     TREE_OPERAND (exp, 1), 0)
8822		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8823		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8824		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8825	  {
8826	    if (GET_CODE (temp) == REG
8827		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8828	      temp = gen_reg_rtx (mode);
8829	    store_expr (TREE_OPERAND (exp, 1), temp,
8830			modifier == EXPAND_STACK_PARM ? 2 : 0);
8831	    jumpif (TREE_OPERAND (exp, 0), op0);
8832
8833	    start_cleanup_deferral ();
8834	    store_expr (TREE_OPERAND (exp, 2), temp,
8835			modifier == EXPAND_STACK_PARM ? 2 : 0);
8836	    op1 = op0;
8837	  }
8838	else if (temp
8839		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8840		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8841		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8842				     TREE_OPERAND (exp, 2), 0)
8843		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8844		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8845		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8846	  {
8847	    if (GET_CODE (temp) == REG
8848		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8849	      temp = gen_reg_rtx (mode);
8850	    store_expr (TREE_OPERAND (exp, 2), temp,
8851			modifier == EXPAND_STACK_PARM ? 2 : 0);
8852	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8853
8854	    start_cleanup_deferral ();
8855	    store_expr (TREE_OPERAND (exp, 1), temp,
8856			modifier == EXPAND_STACK_PARM ? 2 : 0);
8857	    op1 = op0;
8858	  }
8859	else
8860	  {
8861	    op1 = gen_label_rtx ();
8862	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8863
8864	    start_cleanup_deferral ();
8865
8866	    /* One branch of the cond can be void, if it never returns. For
8867	       example A ? throw : E  */
8868	    if (temp != 0
8869		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8870	      store_expr (TREE_OPERAND (exp, 1), temp,
8871			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8872	    else
8873	      expand_expr (TREE_OPERAND (exp, 1),
8874			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8875	    end_cleanup_deferral ();
8876	    emit_queue ();
8877	    emit_jump_insn (gen_jump (op1));
8878	    emit_barrier ();
8879	    emit_label (op0);
8880	    start_cleanup_deferral ();
8881	    if (temp != 0
8882		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8883	      store_expr (TREE_OPERAND (exp, 2), temp,
8884			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8885	    else
8886	      expand_expr (TREE_OPERAND (exp, 2),
8887			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8888	  }
8889
8890	end_cleanup_deferral ();
8891
8892	emit_queue ();
8893	emit_label (op1);
8894	OK_DEFER_POP;
8895
8896	return temp;
8897      }
8898
8899    case TARGET_EXPR:
8900      {
8901	/* Something needs to be initialized, but we didn't know
8902	   where that thing was when building the tree.  For example,
8903	   it could be the return value of a function, or a parameter
8904	   to a function which is laid out on the stack, or a temporary
8905	   variable which must be passed by reference.
8906
8907	   We guarantee that the expression will either be constructed
8908	   or copied into our original target.  */
8909
8910	tree slot = TREE_OPERAND (exp, 0);
8911	tree cleanups = NULL_TREE;
8912	tree exp1;
8913
8914	if (TREE_CODE (slot) != VAR_DECL)
8915	  abort ();
8916
8917	if (! ignore)
8918	  target = original_target;
8919
8920	/* Set this here so that if we get a target that refers to a
8921	   register variable that's already been used, put_reg_into_stack
8922	   knows that it should fix up those uses.  */
8923	TREE_USED (slot) = 1;
8924
8925	if (target == 0)
8926	  {
8927	    if (DECL_RTL_SET_P (slot))
8928	      {
8929		target = DECL_RTL (slot);
8930		/* If we have already expanded the slot, don't do
8931		   it again.  (mrs)  */
8932		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8933		  return target;
8934	      }
8935	    else
8936	      {
8937		target = assign_temp (type, 2, 0, 1);
8938		/* All temp slots at this level must not conflict.  */
8939		preserve_temp_slots (target);
8940		SET_DECL_RTL (slot, target);
8941		if (TREE_ADDRESSABLE (slot))
8942		  put_var_into_stack (slot, /*rescan=*/false);
8943
8944		/* Since SLOT is not known to the called function
8945		   to belong to its stack frame, we must build an explicit
8946		   cleanup.  This case occurs when we must build up a reference
8947		   to pass the reference as an argument.  In this case,
8948		   it is very likely that such a reference need not be
8949		   built here.  */
8950
8951		if (TREE_OPERAND (exp, 2) == 0)
8952		  TREE_OPERAND (exp, 2)
8953		    = (*lang_hooks.maybe_build_cleanup) (slot);
8954		cleanups = TREE_OPERAND (exp, 2);
8955	      }
8956	  }
8957	else
8958	  {
8959	    /* This case does occur, when expanding a parameter which
8960	       needs to be constructed on the stack.  The target
8961	       is the actual stack address that we want to initialize.
8962	       The function we call will perform the cleanup in this case.  */
8963
8964	    /* If we have already assigned it space, use that space,
8965	       not target that we were passed in, as our target
8966	       parameter is only a hint.  */
8967	    if (DECL_RTL_SET_P (slot))
8968	      {
8969		target = DECL_RTL (slot);
8970		/* If we have already expanded the slot, don't do
8971                   it again.  (mrs)  */
8972		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8973		  return target;
8974	      }
8975	    else
8976	      {
8977		SET_DECL_RTL (slot, target);
8978		/* If we must have an addressable slot, then make sure that
8979		   the RTL that we just stored in slot is OK.  */
8980		if (TREE_ADDRESSABLE (slot))
8981		  put_var_into_stack (slot, /*rescan=*/true);
8982	      }
8983	  }
8984
8985	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8986	/* Mark it as expanded.  */
8987	TREE_OPERAND (exp, 1) = NULL_TREE;
8988
8989	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8990
8991	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8992
8993	return target;
8994      }
8995
8996    case INIT_EXPR:
8997      {
8998	tree lhs = TREE_OPERAND (exp, 0);
8999	tree rhs = TREE_OPERAND (exp, 1);
9000
9001	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9002	return temp;
9003      }
9004
9005    case MODIFY_EXPR:
9006      {
9007	/* If lhs is complex, expand calls in rhs before computing it.
9008	   That's so we don't compute a pointer and save it over a
9009	   call.  If lhs is simple, compute it first so we can give it
9010	   as a target if the rhs is just a call.  This avoids an
9011	   extra temp and copy and that prevents a partial-subsumption
9012	   which makes bad code.  Actually we could treat
9013	   component_ref's of vars like vars.  */
9014
9015	tree lhs = TREE_OPERAND (exp, 0);
9016	tree rhs = TREE_OPERAND (exp, 1);
9017
9018	temp = 0;
9019
9020	/* Check for |= or &= of a bitfield of size one into another bitfield
9021	   of size 1.  In this case, (unless we need the result of the
9022	   assignment) we can do this more efficiently with a
9023	   test followed by an assignment, if necessary.
9024
9025	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
9026	   things change so we do, this code should be enhanced to
9027	   support it.  */
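	/* E.g. "s.a |= t.b", with S.A and T.B both one-bit fields, is done
	   as "if (t.b) s.a = 1;", and "s.a &= t.b" as "if (!t.b) s.a = 0;",
	   avoiding a read-modify-write of S.A.  */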
9028	if (ignore
9029	    && TREE_CODE (lhs) == COMPONENT_REF
9030	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
9031		|| TREE_CODE (rhs) == BIT_AND_EXPR)
9032	    && TREE_OPERAND (rhs, 0) == lhs
9033	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9034	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9035	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9036	  {
9037	    rtx label = gen_label_rtx ();
9038
9039	    do_jump (TREE_OPERAND (rhs, 1),
9040		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9041		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9042	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
9043					     (TREE_CODE (rhs) == BIT_IOR_EXPR
9044					      ? integer_one_node
9045					      : integer_zero_node)),
9046			       0, 0);
9047	    do_pending_stack_adjust ();
9048	    emit_label (label);
9049	    return const0_rtx;
9050	  }
9051
9052	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9053
9054	return temp;
9055      }
9056
9057    case RETURN_EXPR:
9058      if (!TREE_OPERAND (exp, 0))
9059	expand_null_return ();
9060      else
9061	expand_return (TREE_OPERAND (exp, 0));
9062      return const0_rtx;
9063
9064    case PREINCREMENT_EXPR:
9065    case PREDECREMENT_EXPR:
9066      return expand_increment (exp, 0, ignore);
9067
9068    case POSTINCREMENT_EXPR:
9069    case POSTDECREMENT_EXPR:
9070      /* Faster to treat as pre-increment if result is not used.  */
9071      return expand_increment (exp, ! ignore, ignore);
9072
9073    case ADDR_EXPR:
9074      if (modifier == EXPAND_STACK_PARM)
9075	target = 0;
9076      /* Are we taking the address of a nested function?  */
9077      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9078	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9079	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9080	  && ! TREE_STATIC (exp))
9081	{
9082	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
9083	  op0 = force_operand (op0, target);
9084	}
9085      /* If we are taking the address of something erroneous, just
9086	 return a zero.  */
9087      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9088	return const0_rtx;
9089      /* If we are taking the address of a constant and are at the
9090	 top level, we have to use output_constant_def since we can't
9091	 call force_const_mem at top level.  */
9092      else if (cfun == 0
9093	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9094		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9095		       == 'c')))
9096	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9097      else
9098	{
9099	  /* We make sure to pass const0_rtx down if we came in with
9100	     ignore set, to avoid doing the cleanups twice for something.  */
9101	  op0 = expand_expr (TREE_OPERAND (exp, 0),
9102			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
9103			     (modifier == EXPAND_INITIALIZER
9104			      ? modifier : EXPAND_CONST_ADDRESS));
9105
9106	  /* If we are going to ignore the result, OP0 will have been set
9107	     to const0_rtx, so just return it.  Don't get confused and
9108	     think we are taking the address of the constant.  */
9109	  if (ignore)
9110	    return op0;
9111
9112	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9113	     clever and return a REG when given a MEM.  */
9114	  op0 = protect_from_queue (op0, 1);
9115
9116	  /* We would like the object in memory.  If it is a constant, we can
9117	     have it be statically allocated into memory.  For a non-constant,
9118	     we need to allocate some memory and store the value into it.  */
9119
9120	  if (CONSTANT_P (op0))
9121	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9122				   op0);
9123	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9124		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9125		   || GET_CODE (op0) == PARALLEL)
9126	    {
9127	      /* If the operand is a SAVE_EXPR, we can deal with this by
9128		 forcing the SAVE_EXPR into memory.  */
9129	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9130		{
9131		  put_var_into_stack (TREE_OPERAND (exp, 0),
9132				      /*rescan=*/true);
9133		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9134		}
9135	      else
9136		{
9137		  /* If this object is in a register, it can't be BLKmode.  */
9138		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9139		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
9140
9141		  if (GET_CODE (op0) == PARALLEL)
9142		    /* Handle calls that pass values in multiple
9143		       non-contiguous locations.  The Irix 6 ABI has examples
9144		       of this.  */
9145		    emit_group_store (memloc, op0,
9146				      int_size_in_bytes (inner_type));
9147		  else
9148		    emit_move_insn (memloc, op0);
9149
9150		  op0 = memloc;
9151		}
9152	    }
9153
9154	  if (GET_CODE (op0) != MEM)
9155	    abort ();
9156
9157	  mark_temp_addr_taken (op0);
9158	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9159	    {
9160	      op0 = XEXP (op0, 0);
9161#ifdef POINTERS_EXTEND_UNSIGNED
9162	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9163		  && mode == ptr_mode)
9164		op0 = convert_memory_address (ptr_mode, op0);
9165#endif
9166	      return op0;
9167	    }
9168
9169	  /* If OP0 is not aligned at least as much as the type requires, we
9170	     need to make a temporary, copy OP0 to it, and take the address of
9171	     the temporary.  We want to use the alignment of the type, not of
9172	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
9173	     the test for BLKmode means that can't happen.  The test for
9174	     BLKmode is because we never make mis-aligned MEMs with
9175	     non-BLKmode.
9176
9177	     We don't need to do this at all if the machine doesn't have
9178	     strict alignment.  */
9179	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9180	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9181		  > MEM_ALIGN (op0))
9182	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9183	    {
9184	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9185	      rtx new;
9186
9187	      if (TYPE_ALIGN_OK (inner_type))
9188		abort ();
9189
9190	      if (TREE_ADDRESSABLE (inner_type))
9191		{
9192		  /* We can't make a bitwise copy of this object, so fail.  */
9193		  error ("cannot take the address of an unaligned member");
9194		  return const0_rtx;
9195		}
9196
9197	      new = assign_stack_temp_for_type
9198		(TYPE_MODE (inner_type),
9199		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9200		 : int_size_in_bytes (inner_type),
9201		 1, build_qualified_type (inner_type,
9202					  (TYPE_QUALS (inner_type)
9203					   | TYPE_QUAL_CONST)));
9204
9205	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9206			       (modifier == EXPAND_STACK_PARM
9207				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9208
9209	      op0 = new;
9210	    }
9211
9212	  op0 = force_operand (XEXP (op0, 0), target);
9213	}
9214
9215      if (flag_force_addr
9216	  && GET_CODE (op0) != REG
9217	  && modifier != EXPAND_CONST_ADDRESS
9218	  && modifier != EXPAND_INITIALIZER
9219	  && modifier != EXPAND_SUM)
9220	op0 = force_reg (Pmode, op0);
9221
9222      if (GET_CODE (op0) == REG
9223	  && ! REG_USERVAR_P (op0))
9224	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9225
9226#ifdef POINTERS_EXTEND_UNSIGNED
9227      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9228	  && mode == ptr_mode)
9229	op0 = convert_memory_address (ptr_mode, op0);
9230#endif
9231
9232      return op0;
9233
9234    case ENTRY_VALUE_EXPR:
9235      abort ();
9236
9237    /* COMPLEX type for Extended Pascal & Fortran  */
9238    case COMPLEX_EXPR:
9239      {
9240	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9241	rtx insns;
9242
9243	/* Get the rtx code of the operands.  */
9244	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9245	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9246
9247	if (! target)
9248	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9249
9250	start_sequence ();
9251
9252	/* Move the real (op0) and imaginary (op1) parts to their location.  */
9253	emit_move_insn (gen_realpart (mode, target), op0);
9254	emit_move_insn (gen_imagpart (mode, target), op1);
9255
9256	insns = get_insns ();
9257	end_sequence ();
9258
9259	/* Complex construction should appear as a single unit.  */
9260	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9261	   each with a separate pseudo as destination.
9262	   It's not correct for flow to treat them as a unit.  */
9263	if (GET_CODE (target) != CONCAT)
9264	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9265	else
9266	  emit_insn (insns);
9267
9268	return target;
9269      }
9270
9271    case REALPART_EXPR:
9272      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9273      return gen_realpart (mode, op0);
9274
9275    case IMAGPART_EXPR:
9276      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9277      return gen_imagpart (mode, op0);
9278
9279    case CONJ_EXPR:
9280      {
9281	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9282	rtx imag_t;
9283	rtx insns;
9284
9285	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9286
9287	if (! target)
9288	  target = gen_reg_rtx (mode);
9289
9290	start_sequence ();
9291
9292	/* Store the realpart and the negated imagpart to target.  */
9293	emit_move_insn (gen_realpart (partmode, target),
9294			gen_realpart (partmode, op0));
9295
9296	imag_t = gen_imagpart (partmode, target);
9297	temp = expand_unop (partmode,
9298			    ! unsignedp && flag_trapv
9299			    && (GET_MODE_CLASS(partmode) == MODE_INT)
9300			    ? negv_optab : neg_optab,
9301			    gen_imagpart (partmode, op0), imag_t, 0);
9302	if (temp != imag_t)
9303	  emit_move_insn (imag_t, temp);
9304
9305	insns = get_insns ();
9306	end_sequence ();
9307
9308	/* Conjugate should appear as a single unit.
9309	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9310	   each with a separate pseudo as destination.
9311	   It's not correct for flow to treat them as a unit.  */
9312	if (GET_CODE (target) != CONCAT)
9313	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9314	else
9315	  emit_insn (insns);
9316
9317	return target;
9318      }
9319
9320    case TRY_CATCH_EXPR:
9321      {
9322	tree handler = TREE_OPERAND (exp, 1);
9323
9324	expand_eh_region_start ();
9325
9326	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9327
9328	expand_eh_region_end_cleanup (handler);
9329
9330	return op0;
9331      }
9332
9333    case TRY_FINALLY_EXPR:
9334      {
9335	tree try_block = TREE_OPERAND (exp, 0);
9336	tree finally_block = TREE_OPERAND (exp, 1);
9337
9338        if (!optimize || unsafe_for_reeval (finally_block) > 1)
9339	  {
9340	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9341	       is not sufficient, so we cannot expand the block twice.
9342	       So we play games with GOTO_SUBROUTINE_EXPR to let us
9343	       expand the thing only once.  */
9344	    /* When not optimizing, we go ahead with this form since
9345	       (1) user breakpoints operate more predictably without
9346		   code duplication, and
9347	       (2) we're not running any of the global optimizers
9348	           that would explode in time/space with the highly
9349		   connected CFG created by the indirect branching.  */
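	    /* The insns emitted below are laid out roughly as
		 <try_block>
		 return_link = &&resume; goto finally; resume:
		 goto done;
	       finally:
		 <finally_block>
		 goto *return_link;
	       done:
	       with the GOTO_SUBROUTINE_EXPR cleanup providing the "call"
	       into the finally clause.  */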
9350
9351	    rtx finally_label = gen_label_rtx ();
9352	    rtx done_label = gen_label_rtx ();
9353	    rtx return_link = gen_reg_rtx (Pmode);
9354	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9355			          (tree) finally_label, (tree) return_link);
9356	    TREE_SIDE_EFFECTS (cleanup) = 1;
9357
9358	    /* Start a new binding layer that will keep track of all cleanup
9359	       actions to be performed.  */
9360	    expand_start_bindings (2);
9361	    target_temp_slot_level = temp_slot_level;
9362
9363	    expand_decl_cleanup (NULL_TREE, cleanup);
9364	    op0 = expand_expr (try_block, target, tmode, modifier);
9365
9366	    preserve_temp_slots (op0);
9367	    expand_end_bindings (NULL_TREE, 0, 0);
9368	    emit_jump (done_label);
9369	    emit_label (finally_label);
9370	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9371	    emit_indirect_jump (return_link);
9372	    emit_label (done_label);
9373	  }
9374	else
9375	  {
9376	    expand_start_bindings (2);
9377	    target_temp_slot_level = temp_slot_level;
9378
9379	    expand_decl_cleanup (NULL_TREE, finally_block);
9380	    op0 = expand_expr (try_block, target, tmode, modifier);
9381
9382	    preserve_temp_slots (op0);
9383	    expand_end_bindings (NULL_TREE, 0, 0);
9384	  }
9385
9386	return op0;
9387      }
9388
9389    case GOTO_SUBROUTINE_EXPR:
9390      {
9391	rtx subr = (rtx) TREE_OPERAND (exp, 0);
9392	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9393	rtx return_address = gen_label_rtx ();
9394	emit_move_insn (return_link,
9395			gen_rtx_LABEL_REF (Pmode, return_address));
9396	emit_jump (subr);
9397	emit_label (return_address);
9398	return const0_rtx;
9399      }
9400
9401    case VA_ARG_EXPR:
9402      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9403
9404    case EXC_PTR_EXPR:
9405      return get_exception_pointer (cfun);
9406
9407    case FDESC_EXPR:
9408      /* Function descriptors are not valid except as
9409	 initialization constants, and should not be expanded.  */
9410      abort ();
9411
9412    default:
9413      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9414    }
9415
9416  /* Here to do an ordinary binary operator, generating an instruction
9417     from the optab already placed in `this_optab'.  */
9418 binop:
9419  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9420    subtarget = 0;
9421  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9422  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9423 binop2:
9424  if (modifier == EXPAND_STACK_PARM)
9425    target = 0;
9426  temp = expand_binop (mode, this_optab, op0, op1, target,
9427		       unsignedp, OPTAB_LIB_WIDEN);
9428  if (temp == 0)
9429    abort ();
9430  return temp;
9431}
9432
9433/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9434   when applied to the address of EXP produces an address known to be
9435   aligned more than BIGGEST_ALIGNMENT.  */
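/* Concretely, the OFFSET accepted here has the shape
   (BIT_AND_EXPR (NEGATE_EXPR (ADDR_EXPR EXP)) MASK), possibly with
   conversions around the pieces and with a PLACEHOLDER_EXPR allowed in
   place of EXP, where MASK is a constant of the form 2**N - 1: i.e. the
   amount needed to round the address of EXP up to a 2**N boundary.  */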
9436
9437static int
9438is_aligning_offset (offset, exp)
9439     tree offset;
9440     tree exp;
9441{
9442  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
9443  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9444	 || TREE_CODE (offset) == NOP_EXPR
9445	 || TREE_CODE (offset) == CONVERT_EXPR
9446	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9447    offset = TREE_OPERAND (offset, 0);
9448
9449  /* We must now have a BIT_AND_EXPR with a constant that is one less than
9450     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9451  if (TREE_CODE (offset) != BIT_AND_EXPR
9452      || !host_integerp (TREE_OPERAND (offset, 1), 1)
9453      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9454      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9455    return 0;
9456
9457  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9458     It must be NEGATE_EXPR.  Then strip any more conversions.  */
9459  offset = TREE_OPERAND (offset, 0);
9460  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9461	 || TREE_CODE (offset) == NOP_EXPR
9462	 || TREE_CODE (offset) == CONVERT_EXPR)
9463    offset = TREE_OPERAND (offset, 0);
9464
9465  if (TREE_CODE (offset) != NEGATE_EXPR)
9466    return 0;
9467
9468  offset = TREE_OPERAND (offset, 0);
9469  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9470	 || TREE_CODE (offset) == NOP_EXPR
9471	 || TREE_CODE (offset) == CONVERT_EXPR)
9472    offset = TREE_OPERAND (offset, 0);
9473
9474  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9475     whose type is the same as EXP.  */
9476  return (TREE_CODE (offset) == ADDR_EXPR
9477	  && (TREE_OPERAND (offset, 0) == exp
9478	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9479		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
9480		      == TREE_TYPE (exp)))));
9481}
9482
9483/* Return the tree node if an ARG corresponds to a string constant or zero
9484   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9485   in bytes within the string that ARG is accessing.  The type of the
9486   offset will be `sizetype'.  */
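/* E.g. for an ARG of the form &"hello" + 2, this returns the STRING_CST
   for "hello" and sets *PTR_OFFSET to (sizetype) 2.  */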
9487
9488tree
9489string_constant (arg, ptr_offset)
9490     tree arg;
9491     tree *ptr_offset;
9492{
9493  STRIP_NOPS (arg);
9494
9495  if (TREE_CODE (arg) == ADDR_EXPR
9496      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9497    {
9498      *ptr_offset = size_zero_node;
9499      return TREE_OPERAND (arg, 0);
9500    }
9501  else if (TREE_CODE (arg) == PLUS_EXPR)
9502    {
9503      tree arg0 = TREE_OPERAND (arg, 0);
9504      tree arg1 = TREE_OPERAND (arg, 1);
9505
9506      STRIP_NOPS (arg0);
9507      STRIP_NOPS (arg1);
9508
9509      if (TREE_CODE (arg0) == ADDR_EXPR
9510	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9511	{
9512	  *ptr_offset = convert (sizetype, arg1);
9513	  return TREE_OPERAND (arg0, 0);
9514	}
9515      else if (TREE_CODE (arg1) == ADDR_EXPR
9516	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9517	{
9518	  *ptr_offset = convert (sizetype, arg0);
9519	  return TREE_OPERAND (arg1, 0);
9520	}
9521    }
9522
9523  return 0;
9524}
9525
9526/* Expand code for a post- or pre- increment or decrement
9527   and return the RTX for the result.
9528   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
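/* E.g. for "i++" whose value is used (POST == 1), the returned RTX holds
   the old value of I, while the increment of I itself is emitted now or
   queued for later.  */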
9529
9530static rtx
9531expand_increment (exp, post, ignore)
9532     tree exp;
9533     int post, ignore;
9534{
9535  rtx op0, op1;
9536  rtx temp, value;
9537  tree incremented = TREE_OPERAND (exp, 0);
9538  optab this_optab = add_optab;
9539  int icode;
9540  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9541  int op0_is_copy = 0;
9542  int single_insn = 0;
9543  /* 1 means we can't store into OP0 directly,
9544     because it is a subreg narrower than a word,
9545     and we don't dare clobber the rest of the word.  */
9546  int bad_subreg = 0;
9547
9548  /* Stabilize any component ref that might need to be
9549     evaluated more than once below.  */
9550  if (!post
9551      || TREE_CODE (incremented) == BIT_FIELD_REF
9552      || (TREE_CODE (incremented) == COMPONENT_REF
9553	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9554	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9555    incremented = stabilize_reference (incremented);
9556  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
9557     ones into save exprs so that they don't accidentally get evaluated
9558     more than once by the code below.  */
9559  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9560      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9561    incremented = save_expr (incremented);
9562
9563  /* Compute the operands as RTX.
9564     Note whether OP0 is the actual lvalue or a copy of it:
9565     I believe it is a copy iff it is a register or subreg
9566     and insns were generated in computing it.  */
9567
9568  temp = get_last_insn ();
9569  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9570
9571  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9572     in place but instead must do sign- or zero-extension during assignment,
9573     so we copy it into a new register and let the code below use it as
9574     a copy.
9575
9576     Note that we can safely modify this SUBREG since it is known not to be
9577     shared (it was made by the expand_expr call above).  */
9578
9579  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9580    {
9581      if (post)
9582	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9583      else
9584	bad_subreg = 1;
9585    }
9586  else if (GET_CODE (op0) == SUBREG
9587	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9588    {
9589      /* We cannot increment this SUBREG in place.  If we are
9590	 post-incrementing, get a copy of the old value.  Otherwise,
9591	 just mark that we cannot increment in place.  */
9592      if (post)
9593	op0 = copy_to_reg (op0);
9594      else
9595	bad_subreg = 1;
9596    }
9597
9598  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9599		 && temp != get_last_insn ());
9600  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9601
9602  /* Decide whether incrementing or decrementing.  */
9603  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9604      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9605    this_optab = sub_optab;
9606
9607  /* Convert decrement by a constant into a negative increment.  */
9608  if (this_optab == sub_optab
9609      && GET_CODE (op1) == CONST_INT)
9610    {
9611      op1 = GEN_INT (-INTVAL (op1));
9612      this_optab = add_optab;
9613    }
9614
9615  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9616    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9617
9618  /* For a preincrement, see if we can do this with a single instruction.  */
9619  if (!post)
9620    {
9621      icode = (int) this_optab->handlers[(int) mode].insn_code;
9622      if (icode != (int) CODE_FOR_nothing
9623	  /* Make sure that OP0 is valid for operands 0 and 1
9624	     of the insn we want to queue.  */
9625	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9626	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9627	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9628	single_insn = 1;
9629    }
9630
9631  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9632     then we cannot just increment OP0.  We must therefore contrive to
9633     increment the original value.  Then, for postincrement, we can return
9634     OP0 since it is a copy of the old value.  For preincrement, expand here
9635     unless we can do it with a single insn.
9636
9637     Likewise if storing directly into OP0 would clobber high bits
9638     we need to preserve (bad_subreg).  */
9639  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9640    {
9641      /* This is the easiest way to increment the value wherever it is.
9642	 Problems with multiple evaluation of INCREMENTED are prevented
9643	 because either (1) it is a component_ref or preincrement,
9644	 in which case it was stabilized above, or (2) it is an array_ref
9645	 with constant index in an array in a register, which is
9646	 safe to reevaluate.  */
9647      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9648			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9649			    ? MINUS_EXPR : PLUS_EXPR),
9650			   TREE_TYPE (exp),
9651			   incremented,
9652			   TREE_OPERAND (exp, 1));
9653
9654      while (TREE_CODE (incremented) == NOP_EXPR
9655	     || TREE_CODE (incremented) == CONVERT_EXPR)
9656	{
9657	  newexp = convert (TREE_TYPE (incremented), newexp);
9658	  incremented = TREE_OPERAND (incremented, 0);
9659	}
9660
9661      temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9662      return post ? op0 : temp;
9663    }
9664
9665  if (post)
9666    {
9667      /* We have a true reference to the value in OP0.
9668	 If there is an insn to add or subtract in this mode, queue it.
9669	 Queueing the increment insn avoids the register shuffling
9670	 that often results if we must increment now and first save
9671	 the old value for subsequent use.  */
9672
9673#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9674      op0 = stabilize (op0);
9675#endif
9676
9677      icode = (int) this_optab->handlers[(int) mode].insn_code;
9678      if (icode != (int) CODE_FOR_nothing
9679	  /* Make sure that OP0 is valid for operands 0 and 1
9680	     of the insn we want to queue.  */
9681	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9682	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9683	{
9684	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9685	    op1 = force_reg (mode, op1);
9686
9687	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9688	}
9689      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9690	{
9691	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9692		      ? force_reg (Pmode, XEXP (op0, 0))
9693		      : copy_to_reg (XEXP (op0, 0)));
9694	  rtx temp, result;
9695
9696	  op0 = replace_equiv_address (op0, addr);
9697	  temp = force_reg (GET_MODE (op0), op0);
9698	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9699	    op1 = force_reg (mode, op1);
9700
9701	  /* The increment queue is LIFO, thus we have to `queue'
9702	     the instructions in reverse order.  */
9703	  enqueue_insn (op0, gen_move_insn (op0, temp));
9704	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9705	  return result;
9706	}
9707    }
9708
9709  /* Preincrement, or we can't increment with one simple insn.  */
9710  if (post)
9711    /* Save a copy of the value before inc or dec, to return it later.  */
9712    temp = value = copy_to_reg (op0);
9713  else
9714    /* Arrange to return the incremented value.  */
9715    /* Copy the rtx because expand_binop will protect from the queue,
9716       and the results of that would be invalid for us to return
9717       if our caller does emit_queue before using our result.  */
9718    temp = copy_rtx (value = op0);
9719
9720  /* Increment however we can.  */
9721  op1 = expand_binop (mode, this_optab, value, op1, op0,
9722		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9723
9724  /* Make sure the value is stored into OP0.  */
9725  if (op1 != op0)
9726    emit_move_insn (op0, op1);
9727
9728  return temp;
9729}
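
/* Illustrative sketch, not part of the original sources: the code above
   means that a postincrement used for its value hands the caller a copy
   of the old value while the add itself is queued (or emitted at once).
   The hypothetical fragment below shows roughly how a caller would drive
   it; EXP is assumed to be a POSTINCREMENT_EXPR built by a front end.  */
#if 0
static rtx
expand_post_increment_example (exp)
     tree exp;
{
  /* POST is 1 and IGNORE is 0: the old value is wanted.  */
  rtx old_value = expand_increment (exp, 1, 0);

  /* Any queued add is issued here, after OLD_VALUE has been captured.  */
  emit_queue ();
  return old_value;
}
#endif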
9730
9731/* At the start of a function, record that we have no previously-pushed
9732   arguments waiting to be popped.  */
9733
9734void
9735init_pending_stack_adjust ()
9736{
9737  pending_stack_adjust = 0;
9738}
9739
9740/* When exiting from a function, if safe, clear out any pending stack adjust
9741   so the adjustment won't get done.
9742
9743   Note, if the current function calls alloca, then it must have a
9744   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9745
9746void
9747clear_pending_stack_adjust ()
9748{
9749#ifdef EXIT_IGNORE_STACK
9750  if (optimize > 0
9751      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9752      && EXIT_IGNORE_STACK
9753      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9754      && ! flag_inline_functions)
9755    {
9756      stack_pointer_delta -= pending_stack_adjust;
9757      pending_stack_adjust = 0;
9758    }
9759#endif
9760}
9761
9762/* Pop any previously-pushed arguments that have not been popped yet.  */
9763
9764void
9765do_pending_stack_adjust ()
9766{
9767  if (inhibit_defer_pop == 0)
9768    {
9769      if (pending_stack_adjust != 0)
9770	adjust_stack (GEN_INT (pending_stack_adjust));
9771      pending_stack_adjust = 0;
9772    }
9773}
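
/* Illustrative sketch, not part of the original sources: the reason for
   keeping the adjustment pending is that pops from several consecutive
   calls can be merged into one adjust_stack.  In effect a caller does
   something like the following; the byte counts are hypothetical.  */
#if 0
static void
pending_adjust_example ()
{
  pending_stack_adjust += 16;	/* argument bytes of the first call */
  pending_stack_adjust += 8;	/* argument bytes of the second call */

  /* A single 24-byte adjustment is emitted here instead of two.  */
  do_pending_stack_adjust ();
}
#endif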
9774
9775/* Expand conditional expressions.  */
9776
9777/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9778   LABEL is an rtx of code CODE_LABEL, in this function and all the
9779   functions here.  */
9780
9781void
9782jumpifnot (exp, label)
9783     tree exp;
9784     rtx label;
9785{
9786  do_jump (exp, label, NULL_RTX);
9787}
9788
9789/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9790
9791void
9792jumpif (exp, label)
9793     tree exp;
9794     rtx label;
9795{
9796  do_jump (exp, NULL_RTX, label);
9797}
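
/* Illustrative sketch, not part of the original sources: a typical use of
   these two helpers when laying out `if (COND) <then-clause>'.  The label
   and the placement of the then-clause are hypothetical.  */
#if 0
static void
expand_if_example (cond)
     tree cond;
{
  rtx else_label = gen_label_rtx ();

  /* Fall through into the then-clause when COND is nonzero.  */
  jumpifnot (cond, else_label);
  /* ... expand the then-clause here ...  */
  emit_label (else_label);
}
#endif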
9798
9799/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9800   the result is zero, or IF_TRUE_LABEL if the result is one.
9801   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9802   meaning fall through in that case.
9803
9804   do_jump always does any pending stack adjust except when it does not
9805   actually perform a jump.  An example where there is no jump
9806   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9807
9808   This function is responsible for optimizing cases such as
9809   &&, || and comparison operators in EXP.  */
9810
9811void
9812do_jump (exp, if_false_label, if_true_label)
9813     tree exp;
9814     rtx if_false_label, if_true_label;
9815{
9816  enum tree_code code = TREE_CODE (exp);
9817  /* Some cases need to create a label to jump to
9818     in order to properly fall through.
9819     These cases set DROP_THROUGH_LABEL nonzero.  */
9820  rtx drop_through_label = 0;
9821  rtx temp;
9822  int i;
9823  tree type;
9824  enum machine_mode mode;
9825
9826#ifdef MAX_INTEGER_COMPUTATION_MODE
9827  check_max_integer_computation_mode (exp);
9828#endif
9829
9830  emit_queue ();
9831
9832  switch (code)
9833    {
9834    case ERROR_MARK:
9835      break;
9836
9837    case INTEGER_CST:
9838      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9839      if (temp)
9840	emit_jump (temp);
9841      break;
9842
9843#if 0
9844      /* This is not true with #pragma weak  */
9845    case ADDR_EXPR:
9846      /* The address of something can never be zero.  */
9847      if (if_true_label)
9848	emit_jump (if_true_label);
9849      break;
9850#endif
9851
9852    case UNSAVE_EXPR:
9853      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9854      TREE_OPERAND (exp, 0)
9855	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
9856      break;
9857
9858    case NOP_EXPR:
9859      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9860	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9861	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9862	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9863	goto normal;
9864    case CONVERT_EXPR:
9865      /* If we are narrowing the operand, we have to do the compare in the
9866	 narrower mode.  */
9867      if ((TYPE_PRECISION (TREE_TYPE (exp))
9868	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9869	goto normal;
9870    case NON_LVALUE_EXPR:
9871    case REFERENCE_EXPR:
9872    case ABS_EXPR:
9873    case NEGATE_EXPR:
9874    case LROTATE_EXPR:
9875    case RROTATE_EXPR:
9876      /* These cannot change zero->nonzero or vice versa.  */
9877      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9878      break;
9879
9880    case WITH_RECORD_EXPR:
9881      /* Put the object on the placeholder list, recurse through our first
9882	 operand, and pop the list.  */
9883      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9884				    placeholder_list);
9885      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9886      placeholder_list = TREE_CHAIN (placeholder_list);
9887      break;
9888
9889#if 0
9890      /* This never takes fewer insns than evaluating the PLUS_EXPR followed
9891	 by a test, and can be longer if the test is eliminated.  */
9892    case PLUS_EXPR:
9893      /* Reduce to minus.  */
9894      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9895		   TREE_OPERAND (exp, 0),
9896		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9897				 TREE_OPERAND (exp, 1))));
9898      /* Process as MINUS.  */
9899#endif
9900
9901    case MINUS_EXPR:
9902      /* Nonzero iff operands of minus differ.  */
9903      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9904				  TREE_OPERAND (exp, 0),
9905				  TREE_OPERAND (exp, 1)),
9906			   NE, NE, if_false_label, if_true_label);
9907      break;
9908
9909    case BIT_AND_EXPR:
9910      /* If we are AND'ing with a small constant, do this comparison in the
9911	 smallest type that fits.  If the machine doesn't have comparisons
9912	 that small, it will be converted back to the wider comparison.
9913	 This helps if we are testing the sign bit of a narrower object.
9914	 combine can't do this for us because it can't know whether a
9915	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
9916
9917      if (! SLOW_BYTE_ACCESS
9918	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9919	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9920	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9921	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9922	  && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9923	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9924	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9925	      != CODE_FOR_nothing))
9926	{
9927	  do_jump (convert (type, exp), if_false_label, if_true_label);
9928	  break;
9929	}
9930      goto normal;
9931
9932    case TRUTH_NOT_EXPR:
9933      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9934      break;
9935
9936    case TRUTH_ANDIF_EXPR:
9937      if (if_false_label == 0)
9938	if_false_label = drop_through_label = gen_label_rtx ();
9939      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9940      start_cleanup_deferral ();
9941      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9942      end_cleanup_deferral ();
9943      break;
9944
9945    case TRUTH_ORIF_EXPR:
9946      if (if_true_label == 0)
9947	if_true_label = drop_through_label = gen_label_rtx ();
9948      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9949      start_cleanup_deferral ();
9950      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9951      end_cleanup_deferral ();
9952      break;
9953
9954    case COMPOUND_EXPR:
9955      push_temp_slots ();
9956      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9957      preserve_temp_slots (NULL_RTX);
9958      free_temp_slots ();
9959      pop_temp_slots ();
9960      emit_queue ();
9961      do_pending_stack_adjust ();
9962      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9963      break;
9964
9965    case COMPONENT_REF:
9966    case BIT_FIELD_REF:
9967    case ARRAY_REF:
9968    case ARRAY_RANGE_REF:
9969      {
9970	HOST_WIDE_INT bitsize, bitpos;
9971	int unsignedp;
9972	enum machine_mode mode;
9973	tree type;
9974	tree offset;
9975	int volatilep = 0;
9976
9977	/* Get description of this reference.  We don't actually care
9978	   about the underlying object here.  */
9979	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9980			     &unsignedp, &volatilep);
9981
9982	type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9983	if (! SLOW_BYTE_ACCESS
9984	    && type != 0 && bitsize >= 0
9985	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9986	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9987		!= CODE_FOR_nothing))
9988	  {
9989	    do_jump (convert (type, exp), if_false_label, if_true_label);
9990	    break;
9991	  }
9992	goto normal;
9993      }
9994
9995    case COND_EXPR:
9996      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9997      if (integer_onep (TREE_OPERAND (exp, 1))
9998	  && integer_zerop (TREE_OPERAND (exp, 2)))
9999	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10000
10001      else if (integer_zerop (TREE_OPERAND (exp, 1))
10002	       && integer_onep (TREE_OPERAND (exp, 2)))
10003	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10004
10005      else
10006	{
10007	  rtx label1 = gen_label_rtx ();
10008	  drop_through_label = gen_label_rtx ();
10009
10010	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10011
10012	  start_cleanup_deferral ();
10013	  /* Now the THEN-expression.  */
10014	  do_jump (TREE_OPERAND (exp, 1),
10015		   if_false_label ? if_false_label : drop_through_label,
10016		   if_true_label ? if_true_label : drop_through_label);
10017	  /* In case the do_jump just above never jumps.  */
10018	  do_pending_stack_adjust ();
10019	  emit_label (label1);
10020
10021	  /* Now the ELSE-expression.  */
10022	  do_jump (TREE_OPERAND (exp, 2),
10023		   if_false_label ? if_false_label : drop_through_label,
10024		   if_true_label ? if_true_label : drop_through_label);
10025	  end_cleanup_deferral ();
10026	}
10027      break;
10028
10029    case EQ_EXPR:
10030      {
10031	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10032
10033	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10034	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10035	  {
10036	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10037	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10038	    do_jump
10039	      (fold
10040	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10041		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10042				    fold (build1 (REALPART_EXPR,
10043						  TREE_TYPE (inner_type),
10044						  exp0)),
10045				    fold (build1 (REALPART_EXPR,
10046						  TREE_TYPE (inner_type),
10047						  exp1)))),
10048		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10049				    fold (build1 (IMAGPART_EXPR,
10050						  TREE_TYPE (inner_type),
10051						  exp0)),
10052				    fold (build1 (IMAGPART_EXPR,
10053						  TREE_TYPE (inner_type),
10054						  exp1)))))),
10055	       if_false_label, if_true_label);
10056	  }
10057
10058	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10059	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10060
10061	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10062		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
10063	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10064	else
10065	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
10066	break;
10067      }
10068
10069    case NE_EXPR:
10070      {
10071	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10072
10073	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10074	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10075	  {
10076	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10077	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10078	    do_jump
10079	      (fold
10080	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10081		       fold (build (NE_EXPR, TREE_TYPE (exp),
10082				    fold (build1 (REALPART_EXPR,
10083						  TREE_TYPE (inner_type),
10084						  exp0)),
10085				    fold (build1 (REALPART_EXPR,
10086						  TREE_TYPE (inner_type),
10087						  exp1)))),
10088		       fold (build (NE_EXPR, TREE_TYPE (exp),
10089				    fold (build1 (IMAGPART_EXPR,
10090						  TREE_TYPE (inner_type),
10091						  exp0)),
10092				    fold (build1 (IMAGPART_EXPR,
10093						  TREE_TYPE (inner_type),
10094						  exp1)))))),
10095	       if_false_label, if_true_label);
10096	  }
10097
10098	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10099	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10100
10101	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10102		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
10103	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10104	else
10105	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
10106	break;
10107      }
10108
10109    case LT_EXPR:
10110      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10111      if (GET_MODE_CLASS (mode) == MODE_INT
10112	  && ! can_compare_p (LT, mode, ccp_jump))
10113	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10114      else
10115	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
10116      break;
10117
10118    case LE_EXPR:
10119      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10120      if (GET_MODE_CLASS (mode) == MODE_INT
10121	  && ! can_compare_p (LE, mode, ccp_jump))
10122	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10123      else
10124	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
10125      break;
10126
10127    case GT_EXPR:
10128      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10129      if (GET_MODE_CLASS (mode) == MODE_INT
10130	  && ! can_compare_p (GT, mode, ccp_jump))
10131	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10132      else
10133	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
10134      break;
10135
10136    case GE_EXPR:
10137      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10138      if (GET_MODE_CLASS (mode) == MODE_INT
10139	  && ! can_compare_p (GE, mode, ccp_jump))
10140	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10141      else
10142	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10143      break;
10144
10145    case UNORDERED_EXPR:
10146    case ORDERED_EXPR:
10147      {
10148	enum rtx_code cmp, rcmp;
10149	int do_rev;
10150
10151	if (code == UNORDERED_EXPR)
10152	  cmp = UNORDERED, rcmp = ORDERED;
10153	else
10154	  cmp = ORDERED, rcmp = UNORDERED;
10155	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10156
10157	do_rev = 0;
10158	if (! can_compare_p (cmp, mode, ccp_jump)
10159	    && (can_compare_p (rcmp, mode, ccp_jump)
10160		/* If the target doesn't provide either UNORDERED or ORDERED
10161		   comparisons, canonicalize on UNORDERED for the library.  */
10162		|| rcmp == UNORDERED))
10163	  do_rev = 1;
10164
10165	if (! do_rev)
10166	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10167	else
10168	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10169      }
10170      break;
10171
10172    {
10173      enum rtx_code rcode1;
10174      enum tree_code tcode2;
10175
10176      case UNLT_EXPR:
10177	rcode1 = UNLT;
10178	tcode2 = LT_EXPR;
10179	goto unordered_bcc;
10180      case UNLE_EXPR:
10181	rcode1 = UNLE;
10182	tcode2 = LE_EXPR;
10183	goto unordered_bcc;
10184      case UNGT_EXPR:
10185	rcode1 = UNGT;
10186	tcode2 = GT_EXPR;
10187	goto unordered_bcc;
10188      case UNGE_EXPR:
10189	rcode1 = UNGE;
10190	tcode2 = GE_EXPR;
10191	goto unordered_bcc;
10192      case UNEQ_EXPR:
10193	rcode1 = UNEQ;
10194	tcode2 = EQ_EXPR;
10195	goto unordered_bcc;
10196
10197      unordered_bcc:
10198	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10199	if (can_compare_p (rcode1, mode, ccp_jump))
10200	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10201			       if_true_label);
10202	else
10203	  {
10204	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
10205	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
10206	    tree cmp0, cmp1;
10207
10208	    /* If the target doesn't support combined unordered
10209	       compares, decompose into UNORDERED + comparison.  */
10210	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10211	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10212	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10213	    do_jump (exp, if_false_label, if_true_label);
10214	  }
10215      }
10216      break;
10217
10218      /* Special case:
10219		__builtin_expect (<test>, 0)	and
10220		__builtin_expect (<test>, 1)
10221
10222	 We need to do this here, so that <test> is not converted to an SCC
10223	 operation on machines that use condition code registers and COMPARE
10224	 like the PowerPC, and then the jump is done based on whether the SCC
10225	 operation produced a 1 or 0.  */
10226    case CALL_EXPR:
10227      /* Check for a built-in function.  */
10228      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10229	{
10230	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10231	  tree arglist = TREE_OPERAND (exp, 1);
10232
10233	  if (TREE_CODE (fndecl) == FUNCTION_DECL
10234	      && DECL_BUILT_IN (fndecl)
10235	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10236	      && arglist != NULL_TREE
10237	      && TREE_CHAIN (arglist) != NULL_TREE)
10238	    {
10239	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10240						    if_true_label);
10241
10242	      if (seq != NULL_RTX)
10243		{
10244		  emit_insn (seq);
10245		  return;
10246		}
10247	    }
10248	}
10249      /* fall through and generate the normal code.  */
10250
10251    default:
10252    normal:
10253      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10254#if 0
10255      /* This is not needed any more and causes poor code since it causes
10256	 comparisons and tests from non-SI objects to have different code
10257	 sequences.  */
10258      /* Copy to register to avoid generating bad insns by cse
10259	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
10260      if (!cse_not_expected && GET_CODE (temp) == MEM)
10261	temp = copy_to_reg (temp);
10262#endif
10263      do_pending_stack_adjust ();
10264      /* Do any postincrements in the expression that was tested.  */
10265      emit_queue ();
10266
10267      if (GET_CODE (temp) == CONST_INT
10268	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10269	  || GET_CODE (temp) == LABEL_REF)
10270	{
10271	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10272	  if (target)
10273	    emit_jump (target);
10274	}
10275      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10276	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10277	/* Note swapping the labels gives us not-equal.  */
10278	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10279      else if (GET_MODE (temp) != VOIDmode)
10280	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10281				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10282				 GET_MODE (temp), NULL_RTX,
10283				 if_false_label, if_true_label);
10284      else
10285	abort ();
10286    }
10287
10288  if (drop_through_label)
10289    {
10290      /* If do_jump produces code that might be jumped around,
10291	 do any stack adjusts from that code, before the place
10292	 where control merges in.  */
10293      do_pending_stack_adjust ();
10294      emit_label (drop_through_label);
10295    }
10296}
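
/* Illustrative sketch, not part of the original sources: the point of the
   TRUTH_ANDIF_EXPR handling above is that `a && b' in a test position is
   expanded as two conditional jumps, never as an scc value.  A
   hypothetical caller laying out `if (a && b) <then-clause>' could
   equivalently write:  */
#if 0
static void
andif_example (a, b)
     tree a, b;
{
  rtx skip = gen_label_rtx ();	/* plays the role of the false label */

  /* First operand false: the whole condition is false.  */
  do_jump (a, skip, NULL_RTX);
  /* Otherwise the second operand alone decides.  */
  do_jump (b, skip, NULL_RTX);
  /* ... expand the then-clause here ...  */
  emit_label (skip);
}
#endif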
10297
10298/* Given a comparison expression EXP for values too wide to be compared
10299   with one insn, test the comparison and jump to the appropriate label.
10300   The code of EXP is ignored; we always test GT if SWAP is 0,
10301   and LT if SWAP is 1.  */
10302
10303static void
10304do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10305     tree exp;
10306     int swap;
10307     rtx if_false_label, if_true_label;
10308{
10309  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10310  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10311  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10312  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10313
10314  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10315}
10316
10317/* Compare OP0 with OP1, word at a time, in mode MODE.
10318   UNSIGNEDP says to do unsigned comparison.
10319   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
10320
10321void
10322do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10323     enum machine_mode mode;
10324     int unsignedp;
10325     rtx op0, op1;
10326     rtx if_false_label, if_true_label;
10327{
10328  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10329  rtx drop_through_label = 0;
10330  int i;
10331
10332  if (! if_true_label || ! if_false_label)
10333    drop_through_label = gen_label_rtx ();
10334  if (! if_true_label)
10335    if_true_label = drop_through_label;
10336  if (! if_false_label)
10337    if_false_label = drop_through_label;
10338
10339  /* Compare a word at a time, high order first.  */
10340  for (i = 0; i < nwords; i++)
10341    {
10342      rtx op0_word, op1_word;
10343
10344      if (WORDS_BIG_ENDIAN)
10345	{
10346	  op0_word = operand_subword_force (op0, i, mode);
10347	  op1_word = operand_subword_force (op1, i, mode);
10348	}
10349      else
10350	{
10351	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10352	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10353	}
10354
10355      /* All but the high-order word must be compared as unsigned.  */
10356      do_compare_rtx_and_jump (op0_word, op1_word, GT,
10357			       (unsignedp || i > 0), word_mode, NULL_RTX,
10358			       NULL_RTX, if_true_label);
10359
10360      /* Consider lower words only if these are equal.  */
10361      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10362			       NULL_RTX, NULL_RTX, if_false_label);
10363    }
10364
10365  if (if_false_label)
10366    emit_jump (if_false_label);
10367  if (drop_through_label)
10368    emit_label (drop_through_label);
10369}
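
/* Illustrative sketch, not part of the original sources: on a 32-bit
   target, a signed DImode `a > b' handled by the loop above amounts to
   the C fragment below -- the high words decide unless they are equal,
   only the high-word comparison is signed, and the low words are
   compared unsigned.  The names are hypothetical.  */
#if 0
static int
dimode_greater_example (a_hi, a_lo, b_hi, b_lo)
     long a_hi, b_hi;
     unsigned long a_lo, b_lo;
{
  if (a_hi > b_hi)		/* signed compare of the high words */
    return 1;
  if (a_hi != b_hi)		/* high words differ the other way */
    return 0;
  return a_lo > b_lo;		/* low words compared unsigned */
}
#endif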
10370
10371/* Given an EQ_EXPR expression EXP for values too wide to be compared
10372   with one insn, test the comparison and jump to the appropriate label.  */
10373
10374static void
10375do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10376     tree exp;
10377     rtx if_false_label, if_true_label;
10378{
10379  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10380  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10381  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10382  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10383  int i;
10384  rtx drop_through_label = 0;
10385
10386  if (! if_false_label)
10387    drop_through_label = if_false_label = gen_label_rtx ();
10388
10389  for (i = 0; i < nwords; i++)
10390    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10391			     operand_subword_force (op1, i, mode),
10392			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10393			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
10394
10395  if (if_true_label)
10396    emit_jump (if_true_label);
10397  if (drop_through_label)
10398    emit_label (drop_through_label);
10399}
10400
10401/* Jump according to whether OP0 is 0.
10402   We assume that OP0 has an integer mode that is too wide
10403   for the available compare insns.  */
10404
10405void
10406do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10407     rtx op0;
10408     rtx if_false_label, if_true_label;
10409{
10410  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10411  rtx part;
10412  int i;
10413  rtx drop_through_label = 0;
10414
10415  /* The fastest way of doing this comparison on almost any machine is to
10416     "or" all the words and compare the result.  If all have to be loaded
10417     from memory and this is a very wide item, it's possible this may
10418     be slower, but that's highly unlikely.  */
10419
10420  part = gen_reg_rtx (word_mode);
10421  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10422  for (i = 1; i < nwords && part != 0; i++)
10423    part = expand_binop (word_mode, ior_optab, part,
10424			 operand_subword_force (op0, i, GET_MODE (op0)),
10425			 part, 1, OPTAB_WIDEN);
10426
10427  if (part != 0)
10428    {
10429      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10430			       NULL_RTX, if_false_label, if_true_label);
10431
10432      return;
10433    }
10434
10435  /* If we couldn't do the "or" simply, do this with a series of compares.  */
10436  if (! if_false_label)
10437    drop_through_label = if_false_label = gen_label_rtx ();
10438
10439  for (i = 0; i < nwords; i++)
10440    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10441			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
10442			     if_false_label, NULL_RTX);
10443
10444  if (if_true_label)
10445    emit_jump (if_true_label);
10446
10447  if (drop_through_label)
10448    emit_label (drop_through_label);
10449}
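
/* Illustrative sketch, not part of the original sources: the "or all the
   words" strategy above computes, for a two-word value, the equivalent of
   the fragment below -- one IOR and one compare against zero instead of a
   chain of per-word compares and branches.  The names are hypothetical.  */
#if 0
static int
dimode_is_zero_example (op_hi, op_lo)
     unsigned long op_hi, op_lo;
{
  return (op_hi | op_lo) == 0;
}
#endif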
10450
10451/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10452   (including code to compute the values to be compared)
10453   and set (CC0) according to the result.
10454   The decision as to signed or unsigned comparison must be made by the caller.
10455
10456   We force a stack adjustment unless there are currently
10457   things pushed on the stack that aren't yet used.
10458
10459   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10460   compared.  */
10461
10462rtx
10463compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10464     rtx op0, op1;
10465     enum rtx_code code;
10466     int unsignedp;
10467     enum machine_mode mode;
10468     rtx size;
10469{
10470  enum rtx_code ucode;
10471  rtx tem;
10472
10473  /* If one operand is constant, make it the second one.  Only do this
10474     if the other operand is not constant as well.  */
10475
10476  if (swap_commutative_operands_p (op0, op1))
10477    {
10478      tem = op0;
10479      op0 = op1;
10480      op1 = tem;
10481      code = swap_condition (code);
10482    }
10483
10484  if (flag_force_mem)
10485    {
10486      op0 = force_not_mem (op0);
10487      op1 = force_not_mem (op1);
10488    }
10489
10490  do_pending_stack_adjust ();
10491
10492  ucode = unsignedp ? unsigned_condition (code) : code;
10493  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10494    return tem;
10495
10496#if 0
10497  /* There's no need to do this now that combine.c can eliminate lots of
10498     sign extensions.  This can be less efficient in certain cases on other
10499     machines.  */
10500
10501  /* If this is a signed equality comparison, we can do it as an
10502     unsigned comparison since zero-extension is cheaper than sign
10503     extension and comparisons with zero are done as unsigned.  This is
10504     the case even on machines that can do fast sign extension, since
10505     zero-extension is easier to combine with other operations than
10506     sign-extension is.  If we are comparing against a constant, we must
10507     convert it to what it would look like unsigned.  */
10508  if ((code == EQ || code == NE) && ! unsignedp
10509      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10510    {
10511      if (GET_CODE (op1) == CONST_INT
10512	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10513	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10514      unsignedp = 1;
10515    }
10516#endif
10517
10518  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10519
10520#if HAVE_cc0
10521  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10522#else
10523  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10524#endif
10525}
10526
10527/* Like do_compare_and_jump but expects the values to compare as two rtx's.
10528   The decision as to signed or unsigned comparison must be made by the caller.
10529
10530   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10531   compared.  */
10532
10533void
10534do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10535			 if_false_label, if_true_label)
10536     rtx op0, op1;
10537     enum rtx_code code;
10538     int unsignedp;
10539     enum machine_mode mode;
10540     rtx size;
10541     rtx if_false_label, if_true_label;
10542{
10543  enum rtx_code ucode;
10544  rtx tem;
10545  int dummy_true_label = 0;
10546
10547  /* Reverse the comparison if that is safe and we want to jump if it is
10548     false.  */
10549  if (! if_true_label && ! FLOAT_MODE_P (mode))
10550    {
10551      if_true_label = if_false_label;
10552      if_false_label = 0;
10553      code = reverse_condition (code);
10554    }
10555
10556  /* If one operand is constant, make it the second one.  Only do this
10557     if the other operand is not constant as well.  */
10558
10559  if (swap_commutative_operands_p (op0, op1))
10560    {
10561      tem = op0;
10562      op0 = op1;
10563      op1 = tem;
10564      code = swap_condition (code);
10565    }
10566
10567  if (flag_force_mem)
10568    {
10569      op0 = force_not_mem (op0);
10570      op1 = force_not_mem (op1);
10571    }
10572
10573  do_pending_stack_adjust ();
10574
10575  ucode = unsignedp ? unsigned_condition (code) : code;
10576  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10577    {
10578      if (tem == const_true_rtx)
10579	{
10580	  if (if_true_label)
10581	    emit_jump (if_true_label);
10582	}
10583      else
10584	{
10585	  if (if_false_label)
10586	    emit_jump (if_false_label);
10587	}
10588      return;
10589    }
10590
10591#if 0
10592  /* There's no need to do this now that combine.c can eliminate lots of
10593     sign extensions.  This can be less efficient in certain cases on other
10594     machines.  */
10595
10596  /* If this is a signed equality comparison, we can do it as an
10597     unsigned comparison since zero-extension is cheaper than sign
10598     extension and comparisons with zero are done as unsigned.  This is
10599     the case even on machines that can do fast sign extension, since
10600     zero-extension is easier to combine with other operations than
10601     sign-extension is.  If we are comparing against a constant, we must
10602     convert it to what it would look like unsigned.  */
10603  if ((code == EQ || code == NE) && ! unsignedp
10604      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10605    {
10606      if (GET_CODE (op1) == CONST_INT
10607	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10608	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10609      unsignedp = 1;
10610    }
10611#endif
10612
10613  if (! if_true_label)
10614    {
10615      dummy_true_label = 1;
10616      if_true_label = gen_label_rtx ();
10617    }
10618
10619  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10620			   if_true_label);
10621
10622  if (if_false_label)
10623    emit_jump (if_false_label);
10624  if (dummy_true_label)
10625    emit_label (if_true_label);
10626}
10627
10628/* Generate code for a comparison expression EXP (including code to compute
10629   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10630   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
10631   generated code will drop through.
10632   SIGNED_CODE should be the rtx operation for this comparison for
10633   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10634
10635   We force a stack adjustment unless there are currently
10636   things pushed on the stack that aren't yet used.  */
10637
10638static void
10639do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10640		     if_true_label)
10641     tree exp;
10642     enum rtx_code signed_code, unsigned_code;
10643     rtx if_false_label, if_true_label;
10644{
10645  rtx op0, op1;
10646  tree type;
10647  enum machine_mode mode;
10648  int unsignedp;
10649  enum rtx_code code;
10650
10651  /* Don't crash if the comparison was erroneous.  */
10652  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10653  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10654    return;
10655
10656  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10657  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10658    return;
10659
10660  type = TREE_TYPE (TREE_OPERAND (exp, 0));
10661  mode = TYPE_MODE (type);
10662  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10663      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10664	  || (GET_MODE_BITSIZE (mode)
10665	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10666								      1)))))))
10667    {
10668      /* op0 might have been replaced by a promoted constant, in which
10669	 case the type of the second argument should be used.  */
10670      type = TREE_TYPE (TREE_OPERAND (exp, 1));
10671      mode = TYPE_MODE (type);
10672    }
10673  unsignedp = TREE_UNSIGNED (type);
10674  code = unsignedp ? unsigned_code : signed_code;
10675
10676#ifdef HAVE_canonicalize_funcptr_for_compare
10677  /* If function pointers need to be "canonicalized" before they can
10678     be reliably compared, then canonicalize them.  */
10679  if (HAVE_canonicalize_funcptr_for_compare
10680      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10681      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10682	  == FUNCTION_TYPE))
10683    {
10684      rtx new_op0 = gen_reg_rtx (mode);
10685
10686      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10687      op0 = new_op0;
10688    }
10689
10690  if (HAVE_canonicalize_funcptr_for_compare
10691      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10692      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10693	  == FUNCTION_TYPE))
10694    {
10695      rtx new_op1 = gen_reg_rtx (mode);
10696
10697      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10698      op1 = new_op1;
10699    }
10700#endif
10701
10702  /* Do any postincrements in the expression that was tested.  */
10703  emit_queue ();
10704
10705  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10706			   ((mode == BLKmode)
10707			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10708			   if_false_label, if_true_label);
10709}
10710
10711/* Generate code to calculate EXP using a store-flag instruction
10712   and return an rtx for the result.  EXP is either a comparison
10713   or a TRUTH_NOT_EXPR whose operand is a comparison.
10714
10715   If TARGET is nonzero, store the result there if convenient.
10716
10717   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10718   cheap.
10719
10720   Return zero if there is no suitable set-flag instruction
10721   available on this machine.
10722
10723   Once expand_expr has been called on the arguments of the comparison,
10724   we are committed to doing the store flag, since it is not safe to
10725   re-evaluate the expression.  We emit the store-flag insn by calling
10726   emit_store_flag, but only expand the arguments if we have a reason
10727   to believe that emit_store_flag will be successful.  If we think that
10728   it will, but it isn't, we have to simulate the store-flag with a
10729   set/jump/set sequence.  */
10730
10731static rtx
10732do_store_flag (exp, target, mode, only_cheap)
10733     tree exp;
10734     rtx target;
10735     enum machine_mode mode;
10736     int only_cheap;
10737{
10738  enum rtx_code code;
10739  tree arg0, arg1, type;
10740  tree tem;
10741  enum machine_mode operand_mode;
10742  int invert = 0;
10743  int unsignedp;
10744  rtx op0, op1;
10745  enum insn_code icode;
10746  rtx subtarget = target;
10747  rtx result, label;
10748
10749  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10750     result at the end.  We can't simply invert the test since it would
10751     have already been inverted if it were valid.  This case occurs for
10752     some floating-point comparisons.  */
10753
10754  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10755    invert = 1, exp = TREE_OPERAND (exp, 0);
10756
10757  arg0 = TREE_OPERAND (exp, 0);
10758  arg1 = TREE_OPERAND (exp, 1);
10759
10760  /* Don't crash if the comparison was erroneous.  */
10761  if (arg0 == error_mark_node || arg1 == error_mark_node)
10762    return const0_rtx;
10763
10764  type = TREE_TYPE (arg0);
10765  operand_mode = TYPE_MODE (type);
10766  unsignedp = TREE_UNSIGNED (type);
10767
10768  /* We won't bother with BLKmode store-flag operations because it would mean
10769     passing a lot of information to emit_store_flag.  */
10770  if (operand_mode == BLKmode)
10771    return 0;
10772
10773  /* We won't bother with store-flag operations involving function pointers
10774     when function pointers must be canonicalized before comparisons.  */
10775#ifdef HAVE_canonicalize_funcptr_for_compare
10776  if (HAVE_canonicalize_funcptr_for_compare
10777      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10778	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10779	       == FUNCTION_TYPE))
10780	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10781	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10782		  == FUNCTION_TYPE))))
10783    return 0;
10784#endif
10785
10786  STRIP_NOPS (arg0);
10787  STRIP_NOPS (arg1);
10788
10789  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10790     operation of some type.  Some comparisons against 1 and -1 can be
10791     converted to comparisons with zero.  Do so here so that the tests
10792     below will be aware that we have a comparison with zero.   These
10793     tests will not catch constants in the first operand, but constants
10794     are rarely passed as the first operand.  */
10795
10796  switch (TREE_CODE (exp))
10797    {
10798    case EQ_EXPR:
10799      code = EQ;
10800      break;
10801    case NE_EXPR:
10802      code = NE;
10803      break;
10804    case LT_EXPR:
10805      if (integer_onep (arg1))
10806	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10807      else
10808	code = unsignedp ? LTU : LT;
10809      break;
10810    case LE_EXPR:
10811      if (! unsignedp && integer_all_onesp (arg1))
10812	arg1 = integer_zero_node, code = LT;
10813      else
10814	code = unsignedp ? LEU : LE;
10815      break;
10816    case GT_EXPR:
10817      if (! unsignedp && integer_all_onesp (arg1))
10818	arg1 = integer_zero_node, code = GE;
10819      else
10820	code = unsignedp ? GTU : GT;
10821      break;
10822    case GE_EXPR:
10823      if (integer_onep (arg1))
10824	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10825      else
10826	code = unsignedp ? GEU : GE;
10827      break;
10828
10829    case UNORDERED_EXPR:
10830      code = UNORDERED;
10831      break;
10832    case ORDERED_EXPR:
10833      code = ORDERED;
10834      break;
10835    case UNLT_EXPR:
10836      code = UNLT;
10837      break;
10838    case UNLE_EXPR:
10839      code = UNLE;
10840      break;
10841    case UNGT_EXPR:
10842      code = UNGT;
10843      break;
10844    case UNGE_EXPR:
10845      code = UNGE;
10846      break;
10847    case UNEQ_EXPR:
10848      code = UNEQ;
10849      break;
10850
10851    default:
10852      abort ();
10853    }
10854
10855  /* Put a constant second.  */
10856  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10857    {
10858      tem = arg0; arg0 = arg1; arg1 = tem;
10859      code = swap_condition (code);
10860    }
10861
10862  /* If this is an equality or inequality test of a single bit, we can
10863     do this by shifting the bit being tested to the low-order bit and
10864     masking the result with the constant 1.  If the condition was EQ,
10865     we xor it with 1.  This does not require an scc insn and is faster
10866     than an scc insn even if we have it.  */
10867
10868  if ((code == NE || code == EQ)
10869      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10870      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10871    {
10872      tree inner = TREE_OPERAND (arg0, 0);
10873      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10874      int ops_unsignedp;
10875
10876      /* If INNER is a right shift of a constant and it plus BITNUM does
10877	 not overflow, adjust BITNUM and INNER.  */
10878
10879      if (TREE_CODE (inner) == RSHIFT_EXPR
10880	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10881	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10882	  && bitnum < TYPE_PRECISION (type)
10883	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10884				   bitnum - TYPE_PRECISION (type)))
10885	{
10886	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10887	  inner = TREE_OPERAND (inner, 0);
10888	}
10889
10890      /* If we are going to be able to omit the AND below, we must do our
10891	 operations as unsigned.  If we must use the AND, we have a choice.
10892	 Normally unsigned is faster, but for some machines signed is.  */
10893      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10894#ifdef LOAD_EXTEND_OP
10895		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10896#else
10897		       : 1
10898#endif
10899		       );
10900
10901      if (! get_subtarget (subtarget)
10902	  || GET_MODE (subtarget) != operand_mode
10903	  || ! safe_from_p (subtarget, inner, 1))
10904	subtarget = 0;
10905
10906      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10907
10908      if (bitnum != 0)
10909	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10910			    size_int (bitnum), subtarget, ops_unsignedp);
10911
10912      if (GET_MODE (op0) != mode)
10913	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10914
10915      if ((code == EQ && ! invert) || (code == NE && invert))
10916	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10917			    ops_unsignedp, OPTAB_LIB_WIDEN);
10918
10919      /* Put the AND last so it can combine with more things.  */
10920      if (bitnum != TYPE_PRECISION (type) - 1)
10921	op0 = expand_and (mode, op0, const1_rtx, subtarget);
10922
10923      return op0;
10924    }
10925
10926  /* Now see if we are likely to be able to do this.  Return if not.  */
10927  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10928    return 0;
10929
10930  icode = setcc_gen_code[(int) code];
10931  if (icode == CODE_FOR_nothing
10932      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10933    {
10934      /* We can only do this if it is one of the special cases that
10935	 can be handled without an scc insn.  */
10936      if ((code == LT && integer_zerop (arg1))
10937	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10938	;
10939      else if (BRANCH_COST >= 0
10940	       && ! only_cheap && (code == NE || code == EQ)
10941	       && TREE_CODE (type) != REAL_TYPE
10942	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10943		    != CODE_FOR_nothing)
10944		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10945		       != CODE_FOR_nothing)))
10946	;
10947      else
10948	return 0;
10949    }
10950
10951  if (! get_subtarget (target)
10952      || GET_MODE (subtarget) != operand_mode
10953      || ! safe_from_p (subtarget, arg1, 1))
10954    subtarget = 0;
10955
10956  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10957  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10958
10959  if (target == 0)
10960    target = gen_reg_rtx (mode);
10961
10962  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10963     because, if emit_store_flag does anything, it will succeed and
10964     OP0 and OP1 will not be used subsequently.  */
10965
10966  result = emit_store_flag (target, code,
10967			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10968			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10969			    operand_mode, unsignedp, 1);
10970
10971  if (result)
10972    {
10973      if (invert)
10974	result = expand_binop (mode, xor_optab, result, const1_rtx,
10975			       result, 0, OPTAB_LIB_WIDEN);
10976      return result;
10977    }
10978
10979  /* If this failed, we have to do this with set/compare/jump/set code.  */
10980  if (GET_CODE (target) != REG
10981      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10982    target = gen_reg_rtx (GET_MODE (target));
10983
10984  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10985  result = compare_from_rtx (op0, op1, code, unsignedp,
10986			     operand_mode, NULL_RTX);
10987  if (GET_CODE (result) == CONST_INT)
10988    return (((result == const0_rtx && ! invert)
10989	     || (result != const0_rtx && invert))
10990	    ? const0_rtx : const1_rtx);
10991
10992  /* The code of RESULT may not match CODE if compare_from_rtx
10993     decided to swap its operands and reverse the original code.
10994
10995     We know that compare_from_rtx returns either a CONST_INT or
10996     a new comparison code, so it is safe to just extract the
10997     code from RESULT.  */
10998  code = GET_CODE (result);
10999
11000  label = gen_label_rtx ();
11001  if (bcc_gen_fctn[(int) code] == 0)
11002    abort ();
11003
11004  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11005  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11006  emit_label (label);
11007
11008  return target;
11009}
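
/* Illustrative sketch, not part of the original sources: the single-bit
   special case above turns a test such as `(x & 0x8) == 0' into a shift,
   an XOR and an AND, with no scc insn and no branch -- roughly the C
   below.  The bit number is hypothetical; the AND is kept last so it can
   combine with surrounding code.  */
#if 0
static unsigned int
single_bit_store_flag_example (x)
     unsigned int x;
{
  /* For the NE form the XOR with 1 is omitted.  */
  return ((x >> 3) ^ 1) & 1;
}
#endif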
11010
11011
11012/* Stubs in case we haven't got a casesi insn.  */
11013#ifndef HAVE_casesi
11014# define HAVE_casesi 0
11015# define gen_casesi(a, b, c, d, e) (0)
11016# define CODE_FOR_casesi CODE_FOR_nothing
11017#endif
11018
11019/* If the machine does not have a case insn that compares the bounds,
11020   this means extra overhead for dispatch tables, which raises the
11021   threshold for using them.  */
11022#ifndef CASE_VALUES_THRESHOLD
11023#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
11024#endif /* CASE_VALUES_THRESHOLD */
11025
11026unsigned int
11027case_values_threshold ()
11028{
11029  return CASE_VALUES_THRESHOLD;
11030}
11031
11032/* Attempt to generate a casesi instruction.  Returns 1 if successful,
11033   0 otherwise (i.e. if there is no casesi instruction).  */
11034int
11035try_casesi (index_type, index_expr, minval, range,
11036	    table_label, default_label)
11037     tree index_type, index_expr, minval, range;
11038     rtx table_label ATTRIBUTE_UNUSED;
11039     rtx default_label;
11040{
11041  enum machine_mode index_mode = SImode;
11042  int index_bits = GET_MODE_BITSIZE (index_mode);
11043  rtx op1, op2, index;
11044  enum machine_mode op_mode;
11045
11046  if (! HAVE_casesi)
11047    return 0;
11048
11049  /* Convert the index to SImode.  */
11050  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11051    {
11052      enum machine_mode omode = TYPE_MODE (index_type);
11053      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
11054
11055      /* We must handle the endpoints in the original mode.  */
11056      index_expr = build (MINUS_EXPR, index_type,
11057			  index_expr, minval);
11058      minval = integer_zero_node;
11059      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11060      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11061			       omode, 1, default_label);
11062      /* Now we can safely truncate.  */
11063      index = convert_to_mode (index_mode, index, 0);
11064    }
11065  else
11066    {
11067      if (TYPE_MODE (index_type) != index_mode)
11068	{
11069	  index_expr = convert ((*lang_hooks.types.type_for_size)
11070				(index_bits, 0), index_expr);
11071	  index_type = TREE_TYPE (index_expr);
11072	}
11073
11074      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11075    }
11076  emit_queue ();
11077  index = protect_from_queue (index, 0);
11078  do_pending_stack_adjust ();
11079
11080  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
11081  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
11082      (index, op_mode))
11083    index = copy_to_mode_reg (op_mode, index);
11084
11085  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
11086
11087  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
11088  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
11089		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
11090  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
11091      (op1, op_mode))
11092    op1 = copy_to_mode_reg (op_mode, op1);
11093
11094  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
11095
11096  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
11097  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
11098		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
11099  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
11100      (op2, op_mode))
11101    op2 = copy_to_mode_reg (op_mode, op2);
11102
11103  emit_jump_insn (gen_casesi (index, op1, op2,
11104			      table_label, default_label));
11105  return 1;
11106}
11107
11108/* Attempt to generate a tablejump instruction; same concept.  */
11109#ifndef HAVE_tablejump
11110#define HAVE_tablejump 0
11111#define gen_tablejump(x, y) (0)
11112#endif
11113
11114/* Subroutine of the next function.
11115
11116   INDEX is the value being switched on, with the lowest value
11117   in the table already subtracted.
11118   MODE is its expected mode (needed if INDEX is constant).
11119   RANGE is the length of the jump table.
11120   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11121
11122   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11123   index value is out of range.  */
11124
11125static void
11126do_tablejump (index, mode, range, table_label, default_label)
11127     rtx index, range, table_label, default_label;
11128     enum machine_mode mode;
11129{
11130  rtx temp, vector;
11131
11132  if (INTVAL (range) > cfun->max_jumptable_ents)
11133    cfun->max_jumptable_ents = INTVAL (range);
11134
11135  /* Do an unsigned comparison (in the proper mode) between the index
11136     expression and the value which represents the length of the range.
11137     Since we just finished subtracting the lower bound of the range
11138     from the index expression, this comparison allows us to simultaneously
11139     check that the original index expression value is both greater than
11140     or equal to the minimum value of the range and less than or equal to
11141     the maximum value of the range.  */
11142
11143  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11144			   default_label);
11145
11146  /* If index is in range, it must fit in Pmode.
11147     Convert to Pmode so we can index with it.  */
11148  if (mode != Pmode)
11149    index = convert_to_mode (Pmode, index, 1);
11150
11151  /* Don't let a MEM slip through, because then INDEX that comes
11152     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11153     and break_out_memory_refs will go to work on it and mess it up.  */
11154#ifdef PIC_CASE_VECTOR_ADDRESS
11155  if (flag_pic && GET_CODE (index) != REG)
11156    index = copy_to_mode_reg (Pmode, index);
11157#endif
11158
11159  /* If flag_force_addr were to affect this address
11160     it could interfere with the tricky assumptions made
11161     about addresses that contain label-refs,
11162     which may be valid only very near the tablejump itself.  */
11163  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11164     GET_MODE_SIZE, because this indicates how large insns are.  The other
11165     uses should all be Pmode, because they are addresses.  This code
11166     could fail if addresses and insns are not the same size.  */
11167  index = gen_rtx_PLUS (Pmode,
11168			gen_rtx_MULT (Pmode, index,
11169				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11170			gen_rtx_LABEL_REF (Pmode, table_label));
11171#ifdef PIC_CASE_VECTOR_ADDRESS
11172  if (flag_pic)
11173    index = PIC_CASE_VECTOR_ADDRESS (index);
11174  else
11175#endif
11176    index = memory_address_noforce (CASE_VECTOR_MODE, index);
11177  temp = gen_reg_rtx (CASE_VECTOR_MODE);
11178  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11179  RTX_UNCHANGING_P (vector) = 1;
11180  MEM_NOTRAP_P (vector) = 1;
11181  convert_move (temp, vector, 0);
11182
11183  emit_jump_insn (gen_tablejump (temp, table_label));
11184
11185  /* If we are generating PIC code or if the table is PC-relative, the
11186     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11187  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11188    emit_barrier ();
11189}
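
/* Illustrative sketch, not part of the original sources: ignoring the PIC
   variants, the address arithmetic above corresponds to scaling the
   zero-based index by the size of one table entry and loading the target
   out of the dispatch table, as in the hypothetical fragment below.  */
#if 0
static void *
tablejump_target_example (index, table, entry_size)
     unsigned long index;
     const char *table;
     unsigned long entry_size;
{
  /* (MEM (PLUS (MULT index entry_size) table_label))  */
  return *(void *const *) (table + index * entry_size);
}
#endif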
11190
11191int
11192try_tablejump (index_type, index_expr, minval, range,
11193	       table_label, default_label)
11194     tree index_type, index_expr, minval, range;
11195     rtx table_label, default_label;
11196{
11197  rtx index;
11198
11199  if (! HAVE_tablejump)
11200    return 0;
11201
11202  index_expr = fold (build (MINUS_EXPR, index_type,
11203			    convert (index_type, index_expr),
11204			    convert (index_type, minval)));
11205  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11206  emit_queue ();
11207  index = protect_from_queue (index, 0);
11208  do_pending_stack_adjust ();
11209
11210  do_tablejump (index, TYPE_MODE (index_type),
11211		convert_modes (TYPE_MODE (index_type),
11212			       TYPE_MODE (TREE_TYPE (range)),
11213			       expand_expr (range, NULL_RTX,
11214					    VOIDmode, 0),
11215			       TREE_UNSIGNED (TREE_TYPE (range))),
11216		table_label, default_label);
11217  return 1;
11218}
11219
11220/* Nonzero if the mode is a valid vector mode for this architecture.
11221   This returns nonzero even if there is no hardware support for the
11222   vector mode, but we can emulate with narrower modes.  */
11223
11224int
11225vector_mode_valid_p (mode)
11226     enum machine_mode mode;
11227{
11228  enum mode_class class = GET_MODE_CLASS (mode);
11229  enum machine_mode innermode;
11230
11231  /* Doh!  What's going on?  */
11232  if (class != MODE_VECTOR_INT
11233      && class != MODE_VECTOR_FLOAT)
11234    return 0;
11235
11236  /* Hardware support.  Woo hoo!  */
11237  if (VECTOR_MODE_SUPPORTED_P (mode))
11238    return 1;
11239
11240  innermode = GET_MODE_INNER (mode);
11241
11242  /* We should probably return 1 if requesting V4DI and we have no DI,
11243     but do have V2DI, though that case is probably very unlikely.  */
11244
11245  /* If we have support for the inner mode, we can safely emulate it.
11246     We may not have V2DI, but we can emulate with a pair of DIs.  */
11247  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11248}
11249
11250/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11251static rtx
11252const_vector_from_tree (exp)
11253     tree exp;
11254{
11255  rtvec v;
11256  int units, i;
11257  tree link, elt;
11258  enum machine_mode inner, mode;
11259
11260  mode = TYPE_MODE (TREE_TYPE (exp));
11261
11262  if (is_zeros_p (exp))
11263    return CONST0_RTX (mode);
11264
11265  units = GET_MODE_NUNITS (mode);
11266  inner = GET_MODE_INNER (mode);
11267
11268  v = rtvec_alloc (units);
11269
11270  link = TREE_VECTOR_CST_ELTS (exp);
11271  for (i = 0; link; link = TREE_CHAIN (link), ++i)
11272    {
11273      elt = TREE_VALUE (link);
11274
11275      if (TREE_CODE (elt) == REAL_CST)
11276	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11277							 inner);
11278      else
11279	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
11280					       TREE_INT_CST_HIGH (elt),
11281					       inner);
11282    }
11283
11284  return gen_rtx_raw_CONST_VECTOR (mode, v);
11285}
11286
11287#include "gt-expr.h"
11288