expr.c revision 117395
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "real.h"
26#include "rtl.h"
27#include "tree.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should if the stack and args grow in opposite directions, but
53   only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#ifndef PUSH_ARGS_REVERSED
58#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
60#endif
61#endif
62
63#endif
64
65#ifndef STACK_PUSH_CODE
66#ifdef STACK_GROWS_DOWNWARD
67#define STACK_PUSH_CODE PRE_DEC
68#else
69#define STACK_PUSH_CODE PRE_INC
70#endif
71#endif
72
73/* Assume that case vectors are not pc-relative.  */
74#ifndef CASE_VECTOR_PC_RELATIVE
75#define CASE_VECTOR_PC_RELATIVE 0
76#endif
77
78/* Convert defined/undefined to boolean.  */
79#ifdef TARGET_MEM_FUNCTIONS
80#undef TARGET_MEM_FUNCTIONS
81#define TARGET_MEM_FUNCTIONS 1
82#else
83#define TARGET_MEM_FUNCTIONS 0
84#endif
85
86
87/* If this is nonzero, we do not bother generating VOLATILE
88   around volatile memory references, and we are willing to
89   output indirect addresses.  If cse is to follow, we reject
90   indirect addresses so a useful potential cse is generated;
91   if it is used only once, instruction combination will produce
92   the same indirect address eventually.  */
93int cse_not_expected;
94
95/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
96static tree placeholder_list = 0;
97
98/* This structure is used by move_by_pieces to describe the move to
99   be performed.  */
100struct move_by_pieces
101{
102  rtx to;
103  rtx to_addr;
104  int autinc_to;
105  int explicit_inc_to;
106  rtx from;
107  rtx from_addr;
108  int autinc_from;
109  int explicit_inc_from;
110  unsigned HOST_WIDE_INT len;
111  HOST_WIDE_INT offset;
112  int reverse;
113};
114
115/* This structure is used by store_by_pieces to describe the clear to
116   be performed.  */
117
118struct store_by_pieces
119{
120  rtx to;
121  rtx to_addr;
122  int autinc_to;
123  int explicit_inc_to;
124  unsigned HOST_WIDE_INT len;
125  HOST_WIDE_INT offset;
126  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
127  PTR constfundata;
128  int reverse;
129};
130
131static rtx enqueue_insn		PARAMS ((rtx, rtx));
132static unsigned HOST_WIDE_INT move_by_pieces_ninsns
133				PARAMS ((unsigned HOST_WIDE_INT,
134					 unsigned int));
135static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
136					 struct move_by_pieces *));
137static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
138static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
139static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
140static tree emit_block_move_libcall_fn PARAMS ((int));
141static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
142static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
143					 enum machine_mode));
144static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
145					 unsigned int));
146static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
147					 unsigned int));
148static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
149					 enum machine_mode,
150					 struct store_by_pieces *));
151static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
152static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
153static tree clear_storage_libcall_fn PARAMS ((int));
154static rtx compress_float_constant PARAMS ((rtx, rtx));
155static rtx get_subtarget	PARAMS ((rtx));
156static int is_zeros_p		PARAMS ((tree));
157static int mostly_zeros_p	PARAMS ((tree));
158static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159					     HOST_WIDE_INT, enum machine_mode,
160					     tree, tree, int, int));
161static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
162static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
163					 HOST_WIDE_INT, enum machine_mode,
164					 tree, enum machine_mode, int, tree,
165					 int));
166static rtx var_rtx		PARAMS ((tree));
167static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
169static int is_aligning_offset	PARAMS ((tree, tree));
170static rtx expand_increment	PARAMS ((tree, int, int));
171static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
172static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
173static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
174					 rtx, rtx));
175static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
176#ifdef PUSH_ROUNDING
177static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
178#endif
179static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
180
181/* Record for each mode whether we can move a register directly to or
182   from an object of that mode in memory.  If we can't, we won't try
183   to use that mode directly when accessing a field of that mode.  */
184
185static char direct_load[NUM_MACHINE_MODES];
186static char direct_store[NUM_MACHINE_MODES];
187
188/* Record for each mode whether we can float-extend from memory.  */
189
190static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
191
192/* If a memory-to-memory move would take MOVE_RATIO or more simple
193   move-instruction sequences, we will do a movstr or libcall instead.  */
194
195#ifndef MOVE_RATIO
196#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197#define MOVE_RATIO 2
198#else
199/* If we are optimizing for space (-Os), cut down the default move ratio.  */
200#define MOVE_RATIO (optimize_size ? 3 : 15)
201#endif
202#endif
203
204/* This macro is used to determine whether move_by_pieces should be called
205   to perform a structure copy.  */
206#ifndef MOVE_BY_PIECES_P
207#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
208  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
209#endif
210
211/* If a clear memory operation would take CLEAR_RATIO or more simple
212   move-instruction sequences, we will do a clrstr or libcall instead.  */
213
214#ifndef CLEAR_RATIO
215#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
216#define CLEAR_RATIO 2
217#else
218/* If we are optimizing for space, cut down the default clear ratio.  */
219#define CLEAR_RATIO (optimize_size ? 3 : 15)
220#endif
221#endif
222
223/* This macro is used to determine whether clear_by_pieces should be
224   called to clear storage.  */
225#ifndef CLEAR_BY_PIECES_P
226#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
227  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
228#endif
229
230/* This array records the insn_code of insns to perform block moves.  */
231enum insn_code movstr_optab[NUM_MACHINE_MODES];
232
233/* This array records the insn_code of insns to perform block clears.  */
234enum insn_code clrstr_optab[NUM_MACHINE_MODES];
235
236/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
237
238#ifndef SLOW_UNALIGNED_ACCESS
239#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
240#endif
241
242/* This is run once per compilation to set up which modes can be used
243   directly in memory and to initialize the block move optab.  */
244
245void
246init_expr_once ()
247{
248  rtx insn, pat;
249  enum machine_mode mode;
250  int num_clobbers;
251  rtx mem, mem1;
252  rtx reg;
253
254  /* Try indexing by frame ptr and try by stack ptr.
255     It is known that on the Convex the stack ptr isn't a valid index.
256     With luck, one or the other is valid on any machine.  */
257  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
258  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
259
260  /* A scratch register we can modify in-place below to avoid
261     useless RTL allocations.  */
262  reg = gen_rtx_REG (VOIDmode, -1);
263
264  insn = rtx_alloc (INSN);
265  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
266  PATTERN (insn) = pat;
267
268  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
269       mode = (enum machine_mode) ((int) mode + 1))
270    {
271      int regno;
272
273      direct_load[(int) mode] = direct_store[(int) mode] = 0;
274      PUT_MODE (mem, mode);
275      PUT_MODE (mem1, mode);
276      PUT_MODE (reg, mode);
277
278      /* See if there is some register that can be used in this mode and
279	 directly loaded or stored from memory.  */
280
281      if (mode != VOIDmode && mode != BLKmode)
282	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
283	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
284	     regno++)
285	  {
286	    if (! HARD_REGNO_MODE_OK (regno, mode))
287	      continue;
288
289	    REGNO (reg) = regno;
290
291	    SET_SRC (pat) = mem;
292	    SET_DEST (pat) = reg;
293	    if (recog (pat, insn, &num_clobbers) >= 0)
294	      direct_load[(int) mode] = 1;
295
296	    SET_SRC (pat) = mem1;
297	    SET_DEST (pat) = reg;
298	    if (recog (pat, insn, &num_clobbers) >= 0)
299	      direct_load[(int) mode] = 1;
300
301	    SET_SRC (pat) = reg;
302	    SET_DEST (pat) = mem;
303	    if (recog (pat, insn, &num_clobbers) >= 0)
304	      direct_store[(int) mode] = 1;
305
306	    SET_SRC (pat) = reg;
307	    SET_DEST (pat) = mem1;
308	    if (recog (pat, insn, &num_clobbers) >= 0)
309	      direct_store[(int) mode] = 1;
310	  }
311    }
312
313  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
314
315  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
316       mode = GET_MODE_WIDER_MODE (mode))
317    {
318      enum machine_mode srcmode;
319      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
320	   srcmode = GET_MODE_WIDER_MODE (srcmode))
321	{
322	  enum insn_code ic;
323
324	  ic = can_extend_p (mode, srcmode, 0);
325	  if (ic == CODE_FOR_nothing)
326	    continue;
327
328	  PUT_MODE (mem, srcmode);
329
330	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
331	    float_extend_from_mem[mode][srcmode] = true;
332	}
333    }
334}
335
336/* This is run at the start of compiling a function.  */
337
338void
339init_expr ()
340{
341  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
342
343  pending_chain = 0;
344  pending_stack_adjust = 0;
345  stack_pointer_delta = 0;
346  inhibit_defer_pop = 0;
347  saveregs_value = 0;
348  apply_args_value = 0;
349  forced_labels = 0;
350}
351
352/* Small sanity check that the queue is empty at the end of a function.  */
353
354void
355finish_expr_for_function ()
356{
357  if (pending_chain)
358    abort ();
359}
360
361/* Manage the queue of increment instructions to be output
362   for POSTINCREMENT_EXPR expressions, etc.  */
363
364/* Queue up to increment (or change) VAR later.  BODY says how:
365   BODY should be the same thing you would pass to emit_insn
366   to increment right away.  It will go to emit_insn later on.
367
368   The value is a QUEUED expression to be used in place of VAR
369   where you want to guarantee the pre-incrementation value of VAR.  */
370
371static rtx
372enqueue_insn (var, body)
373     rtx var, body;
374{
375  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
376				  body, pending_chain);
377  return pending_chain;
378}
379
380/* Use protect_from_queue to convert a QUEUED expression
381   into something that you can put immediately into an instruction.
382   If the queued incrementation has not happened yet,
383   protect_from_queue returns the variable itself.
384   If the incrementation has happened, protect_from_queue returns a temp
385   that contains a copy of the old value of the variable.
386
387   Any time an rtx which might possibly be a QUEUED is to be put
388   into an instruction, it must be passed through protect_from_queue first.
389   QUEUED expressions are not meaningful in instructions.
390
391   Do not pass a value through protect_from_queue and then hold
392   on to it for a while before putting it in an instruction!
393   If the queue is flushed in between, incorrect code will result.  */
394
395rtx
396protect_from_queue (x, modify)
397     rtx x;
398     int modify;
399{
400  RTX_CODE code = GET_CODE (x);
401
402#if 0  /* A QUEUED can hang around after the queue is forced out.  */
403  /* Shortcut for most common case.  */
404  if (pending_chain == 0)
405    return x;
406#endif
407
408  if (code != QUEUED)
409    {
410      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411	 use of autoincrement.  Make a copy of the contents of the memory
412	 location rather than a copy of the address, but not if the value is
413	 of mode BLKmode.  Don't modify X in place since it might be
414	 shared.  */
415      if (code == MEM && GET_MODE (x) != BLKmode
416	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417	{
418	  rtx y = XEXP (x, 0);
419	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
420
421	  if (QUEUED_INSN (y))
422	    {
423	      rtx temp = gen_reg_rtx (GET_MODE (x));
424
425	      emit_insn_before (gen_move_insn (temp, new),
426				QUEUED_INSN (y));
427	      return temp;
428	    }
429
430	  /* Copy the address into a pseudo, so that the returned value
431	     remains correct across calls to emit_queue.  */
432	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
433	}
434
435      /* Otherwise, recursively protect the subexpressions of all
436	 the kinds of rtx's that can contain a QUEUED.  */
437      if (code == MEM)
438	{
439	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
440	  if (tem != XEXP (x, 0))
441	    {
442	      x = copy_rtx (x);
443	      XEXP (x, 0) = tem;
444	    }
445	}
446      else if (code == PLUS || code == MULT)
447	{
448	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
449	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
450	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
451	    {
452	      x = copy_rtx (x);
453	      XEXP (x, 0) = new0;
454	      XEXP (x, 1) = new1;
455	    }
456	}
457      return x;
458    }
459  /* If the increment has not happened, use the variable itself.  Copy it
460     into a new pseudo so that the value remains correct across calls to
461     emit_queue.  */
462  if (QUEUED_INSN (x) == 0)
463    return copy_to_reg (QUEUED_VAR (x));
464  /* If the increment has happened and a pre-increment copy exists,
465     use that copy.  */
466  if (QUEUED_COPY (x) != 0)
467    return QUEUED_COPY (x);
468  /* The increment has happened but we haven't set up a pre-increment copy.
469     Set one up now, and use it.  */
470  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472		    QUEUED_INSN (x));
473  return QUEUED_COPY (x);
474}
475
476/* Return nonzero if X contains a QUEUED expression:
477   if it contains anything that will be altered by a queued increment.
478   We handle only combinations of MEM, PLUS, MINUS and MULT operators
479   since memory addresses generally contain only those.  */
480
481int
482queued_subexp_p (x)
483     rtx x;
484{
485  enum rtx_code code = GET_CODE (x);
486  switch (code)
487    {
488    case QUEUED:
489      return 1;
490    case MEM:
491      return queued_subexp_p (XEXP (x, 0));
492    case MULT:
493    case PLUS:
494    case MINUS:
495      return (queued_subexp_p (XEXP (x, 0))
496	      || queued_subexp_p (XEXP (x, 1)));
497    default:
498      return 0;
499    }
500}
501
502/* Perform all the pending incrementations.  */
503
504void
505emit_queue ()
506{
507  rtx p;
508  while ((p = pending_chain))
509    {
510      rtx body = QUEUED_BODY (p);
511
512      switch (GET_CODE (body))
513	{
514	case INSN:
515	case JUMP_INSN:
516	case CALL_INSN:
517	case CODE_LABEL:
518	case BARRIER:
519	case NOTE:
520	  QUEUED_INSN (p) = body;
521	  emit_insn (body);
522	  break;
523
524#ifdef ENABLE_CHECKING
525	case SEQUENCE:
526	  abort ();
527	  break;
528#endif
529
530	default:
531	  QUEUED_INSN (p) = emit_insn (body);
532	  break;
533	}
534
535      pending_chain = QUEUED_NEXT (p);
536    }
537}
538
539/* Copy data from FROM to TO, where the machine modes are not the same.
540   Both modes may be integer, or both may be floating.
541   UNSIGNEDP should be nonzero if FROM is an unsigned type.
542   This causes zero-extension instead of sign-extension.  */
543
544void
545convert_move (to, from, unsignedp)
546     rtx to, from;
547     int unsignedp;
548{
549  enum machine_mode to_mode = GET_MODE (to);
550  enum machine_mode from_mode = GET_MODE (from);
551  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
552  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
553  enum insn_code code;
554  rtx libcall;
555
556  /* rtx code for making an equivalent value.  */
557  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
558			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
559
560  to = protect_from_queue (to, 1);
561  from = protect_from_queue (from, 0);
562
563  if (to_real != from_real)
564    abort ();
565
566  /* If FROM is a SUBREG that indicates that we have already done at least
567     the required extension, strip it.  We don't handle such SUBREGs as
568     TO here.  */
569
570  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
571      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
572	  >= GET_MODE_SIZE (to_mode))
573      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
574    from = gen_lowpart (to_mode, from), from_mode = to_mode;
575
576  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
577    abort ();
578
579  if (to_mode == from_mode
580      || (from_mode == VOIDmode && CONSTANT_P (from)))
581    {
582      emit_move_insn (to, from);
583      return;
584    }
585
586  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
587    {
588      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
589	abort ();
590
591      if (VECTOR_MODE_P (to_mode))
592	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
593      else
594	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
595
596      emit_move_insn (to, from);
597      return;
598    }
599
600  if (to_real != from_real)
601    abort ();
602
603  if (to_real)
604    {
605      rtx value, insns;
606
607      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
608	{
609	  /* Try converting directly if the insn is supported.  */
610	  if ((code = can_extend_p (to_mode, from_mode, 0))
611	      != CODE_FOR_nothing)
612	    {
613	      emit_unop_insn (code, to, from, UNKNOWN);
614	      return;
615	    }
616	}
617
618#ifdef HAVE_trunchfqf2
619      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
620	{
621	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
622	  return;
623	}
624#endif
625#ifdef HAVE_trunctqfqf2
626      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
627	{
628	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
629	  return;
630	}
631#endif
632#ifdef HAVE_truncsfqf2
633      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
634	{
635	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
636	  return;
637	}
638#endif
639#ifdef HAVE_truncdfqf2
640      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
641	{
642	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
643	  return;
644	}
645#endif
646#ifdef HAVE_truncxfqf2
647      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
648	{
649	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
650	  return;
651	}
652#endif
653#ifdef HAVE_trunctfqf2
654      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
655	{
656	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
657	  return;
658	}
659#endif
660
661#ifdef HAVE_trunctqfhf2
662      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
663	{
664	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
665	  return;
666	}
667#endif
668#ifdef HAVE_truncsfhf2
669      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
670	{
671	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
672	  return;
673	}
674#endif
675#ifdef HAVE_truncdfhf2
676      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
677	{
678	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
679	  return;
680	}
681#endif
682#ifdef HAVE_truncxfhf2
683      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
684	{
685	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
686	  return;
687	}
688#endif
689#ifdef HAVE_trunctfhf2
690      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
691	{
692	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
693	  return;
694	}
695#endif
696
697#ifdef HAVE_truncsftqf2
698      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
699	{
700	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
701	  return;
702	}
703#endif
704#ifdef HAVE_truncdftqf2
705      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
706	{
707	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
708	  return;
709	}
710#endif
711#ifdef HAVE_truncxftqf2
712      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
713	{
714	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
715	  return;
716	}
717#endif
718#ifdef HAVE_trunctftqf2
719      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
720	{
721	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
722	  return;
723	}
724#endif
725
726#ifdef HAVE_truncdfsf2
727      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
728	{
729	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
730	  return;
731	}
732#endif
733#ifdef HAVE_truncxfsf2
734      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
735	{
736	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
737	  return;
738	}
739#endif
740#ifdef HAVE_trunctfsf2
741      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
742	{
743	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
744	  return;
745	}
746#endif
747#ifdef HAVE_truncxfdf2
748      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
749	{
750	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
751	  return;
752	}
753#endif
754#ifdef HAVE_trunctfdf2
755      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
756	{
757	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
758	  return;
759	}
760#endif
761
762      libcall = (rtx) 0;
763      switch (from_mode)
764	{
765	case SFmode:
766	  switch (to_mode)
767	    {
768	    case DFmode:
769	      libcall = extendsfdf2_libfunc;
770	      break;
771
772	    case XFmode:
773	      libcall = extendsfxf2_libfunc;
774	      break;
775
776	    case TFmode:
777	      libcall = extendsftf2_libfunc;
778	      break;
779
780	    default:
781	      break;
782	    }
783	  break;
784
785	case DFmode:
786	  switch (to_mode)
787	    {
788	    case SFmode:
789	      libcall = truncdfsf2_libfunc;
790	      break;
791
792	    case XFmode:
793	      libcall = extenddfxf2_libfunc;
794	      break;
795
796	    case TFmode:
797	      libcall = extenddftf2_libfunc;
798	      break;
799
800	    default:
801	      break;
802	    }
803	  break;
804
805	case XFmode:
806	  switch (to_mode)
807	    {
808	    case SFmode:
809	      libcall = truncxfsf2_libfunc;
810	      break;
811
812	    case DFmode:
813	      libcall = truncxfdf2_libfunc;
814	      break;
815
816	    default:
817	      break;
818	    }
819	  break;
820
821	case TFmode:
822	  switch (to_mode)
823	    {
824	    case SFmode:
825	      libcall = trunctfsf2_libfunc;
826	      break;
827
828	    case DFmode:
829	      libcall = trunctfdf2_libfunc;
830	      break;
831
832	    default:
833	      break;
834	    }
835	  break;
836
837	default:
838	  break;
839	}
840
841      if (libcall == (rtx) 0)
842	/* This conversion is not implemented yet.  */
843	abort ();
844
845      start_sequence ();
846      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
847				       1, from, from_mode);
848      insns = get_insns ();
849      end_sequence ();
850      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
851								    from));
852      return;
853    }
854
855  /* Now both modes are integers.  */
856
857  /* Handle expanding beyond a word.  */
858  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
859      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
860    {
861      rtx insns;
862      rtx lowpart;
863      rtx fill_value;
864      rtx lowfrom;
865      int i;
866      enum machine_mode lowpart_mode;
867      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
868
869      /* Try converting directly if the insn is supported.  */
870      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
871	  != CODE_FOR_nothing)
872	{
873	  /* If FROM is a SUBREG, put it into a register.  Do this
874	     so that we always generate the same set of insns for
875	     better cse'ing; if an intermediate assignment occurred,
876	     we won't be doing the operation directly on the SUBREG.  */
877	  if (optimize > 0 && GET_CODE (from) == SUBREG)
878	    from = force_reg (from_mode, from);
879	  emit_unop_insn (code, to, from, equiv_code);
880	  return;
881	}
882      /* Next, try converting via full word.  */
883      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
884	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
885		   != CODE_FOR_nothing))
886	{
887	  if (GET_CODE (to) == REG)
888	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
889	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
890	  emit_unop_insn (code, to,
891			  gen_lowpart (word_mode, to), equiv_code);
892	  return;
893	}
894
895      /* No special multiword conversion insn; do it by hand.  */
896      start_sequence ();
897
898      /* Since we will turn this into a no conflict block, we must ensure
899	 that the source does not overlap the target.  */
900
901      if (reg_overlap_mentioned_p (to, from))
902	from = force_reg (from_mode, from);
903
904      /* Get a copy of FROM widened to a word, if necessary.  */
905      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
906	lowpart_mode = word_mode;
907      else
908	lowpart_mode = from_mode;
909
910      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
911
912      lowpart = gen_lowpart (lowpart_mode, to);
913      emit_move_insn (lowpart, lowfrom);
914
915      /* Compute the value to put in each remaining word.  */
916      if (unsignedp)
917	fill_value = const0_rtx;
918      else
919	{
920#ifdef HAVE_slt
921	  if (HAVE_slt
922	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
923	      && STORE_FLAG_VALUE == -1)
924	    {
925	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
926			     lowpart_mode, 0);
927	      fill_value = gen_reg_rtx (word_mode);
928	      emit_insn (gen_slt (fill_value));
929	    }
930	  else
931#endif
932	    {
933	      fill_value
934		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
935				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
936				NULL_RTX, 0);
937	      fill_value = convert_to_mode (word_mode, fill_value, 1);
938	    }
939	}
940
941      /* Fill the remaining words.  */
942      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
943	{
944	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
945	  rtx subword = operand_subword (to, index, 1, to_mode);
946
947	  if (subword == 0)
948	    abort ();
949
950	  if (fill_value != subword)
951	    emit_move_insn (subword, fill_value);
952	}
953
954      insns = get_insns ();
955      end_sequence ();
956
957      emit_no_conflict_block (insns, to, from, NULL_RTX,
958			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
959      return;
960    }
961
962  /* Truncating multi-word to a word or less.  */
963  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
964      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
965    {
966      if (!((GET_CODE (from) == MEM
967	     && ! MEM_VOLATILE_P (from)
968	     && direct_load[(int) to_mode]
969	     && ! mode_dependent_address_p (XEXP (from, 0)))
970	    || GET_CODE (from) == REG
971	    || GET_CODE (from) == SUBREG))
972	from = force_reg (from_mode, from);
973      convert_move (to, gen_lowpart (word_mode, from), 0);
974      return;
975    }
976
977  /* Handle pointer conversion.  */			/* SPEE 900220.  */
978  if (to_mode == PQImode)
979    {
980      if (from_mode != QImode)
981	from = convert_to_mode (QImode, from, unsignedp);
982
983#ifdef HAVE_truncqipqi2
984      if (HAVE_truncqipqi2)
985	{
986	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
987	  return;
988	}
989#endif /* HAVE_truncqipqi2 */
990      abort ();
991    }
992
993  if (from_mode == PQImode)
994    {
995      if (to_mode != QImode)
996	{
997	  from = convert_to_mode (QImode, from, unsignedp);
998	  from_mode = QImode;
999	}
1000      else
1001	{
1002#ifdef HAVE_extendpqiqi2
1003	  if (HAVE_extendpqiqi2)
1004	    {
1005	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1006	      return;
1007	    }
1008#endif /* HAVE_extendpqiqi2 */
1009	  abort ();
1010	}
1011    }
1012
1013  if (to_mode == PSImode)
1014    {
1015      if (from_mode != SImode)
1016	from = convert_to_mode (SImode, from, unsignedp);
1017
1018#ifdef HAVE_truncsipsi2
1019      if (HAVE_truncsipsi2)
1020	{
1021	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1022	  return;
1023	}
1024#endif /* HAVE_truncsipsi2 */
1025      abort ();
1026    }
1027
1028  if (from_mode == PSImode)
1029    {
1030      if (to_mode != SImode)
1031	{
1032	  from = convert_to_mode (SImode, from, unsignedp);
1033	  from_mode = SImode;
1034	}
1035      else
1036	{
1037#ifdef HAVE_extendpsisi2
1038	  if (! unsignedp && HAVE_extendpsisi2)
1039	    {
1040	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1041	      return;
1042	    }
1043#endif /* HAVE_extendpsisi2 */
1044#ifdef HAVE_zero_extendpsisi2
1045	  if (unsignedp && HAVE_zero_extendpsisi2)
1046	    {
1047	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1048	      return;
1049	    }
1050#endif /* HAVE_zero_extendpsisi2 */
1051	  abort ();
1052	}
1053    }
1054
1055  if (to_mode == PDImode)
1056    {
1057      if (from_mode != DImode)
1058	from = convert_to_mode (DImode, from, unsignedp);
1059
1060#ifdef HAVE_truncdipdi2
1061      if (HAVE_truncdipdi2)
1062	{
1063	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1064	  return;
1065	}
1066#endif /* HAVE_truncdipdi2 */
1067      abort ();
1068    }
1069
1070  if (from_mode == PDImode)
1071    {
1072      if (to_mode != DImode)
1073	{
1074	  from = convert_to_mode (DImode, from, unsignedp);
1075	  from_mode = DImode;
1076	}
1077      else
1078	{
1079#ifdef HAVE_extendpdidi2
1080	  if (HAVE_extendpdidi2)
1081	    {
1082	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1083	      return;
1084	    }
1085#endif /* HAVE_extendpdidi2 */
1086	  abort ();
1087	}
1088    }
1089
1090  /* Now follow all the conversions between integers
1091     no more than a word long.  */
1092
1093  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1094  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1095      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1096				GET_MODE_BITSIZE (from_mode)))
1097    {
1098      if (!((GET_CODE (from) == MEM
1099	     && ! MEM_VOLATILE_P (from)
1100	     && direct_load[(int) to_mode]
1101	     && ! mode_dependent_address_p (XEXP (from, 0)))
1102	    || GET_CODE (from) == REG
1103	    || GET_CODE (from) == SUBREG))
1104	from = force_reg (from_mode, from);
1105      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1106	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1107	from = copy_to_reg (from);
1108      emit_move_insn (to, gen_lowpart (to_mode, from));
1109      return;
1110    }
1111
1112  /* Handle extension.  */
1113  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1114    {
1115      /* Convert directly if that works.  */
1116      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1117	  != CODE_FOR_nothing)
1118	{
1119	  if (flag_force_mem)
1120	    from = force_not_mem (from);
1121
1122	  emit_unop_insn (code, to, from, equiv_code);
1123	  return;
1124	}
1125      else
1126	{
1127	  enum machine_mode intermediate;
1128	  rtx tmp;
1129	  tree shift_amount;
1130
1131	  /* Search for a mode to convert via.  */
1132	  for (intermediate = from_mode; intermediate != VOIDmode;
1133	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1134	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1135		  != CODE_FOR_nothing)
1136		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1137		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1138					       GET_MODE_BITSIZE (intermediate))))
1139		&& (can_extend_p (intermediate, from_mode, unsignedp)
1140		    != CODE_FOR_nothing))
1141	      {
1142		convert_move (to, convert_to_mode (intermediate, from,
1143						   unsignedp), unsignedp);
1144		return;
1145	      }
1146
1147	  /* No suitable intermediate mode.
1148	     Generate what we need with	shifts.  */
1149	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1150				      - GET_MODE_BITSIZE (from_mode), 0);
1151	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1152	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1153			      to, unsignedp);
1154	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1155			      to, unsignedp);
1156	  if (tmp != to)
1157	    emit_move_insn (to, tmp);
1158	  return;
1159	}
1160    }
1161
1162  /* Support special truncate insns for certain modes.  */
1163
1164  if (from_mode == DImode && to_mode == SImode)
1165    {
1166#ifdef HAVE_truncdisi2
1167      if (HAVE_truncdisi2)
1168	{
1169	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1170	  return;
1171	}
1172#endif
1173      convert_move (to, force_reg (from_mode, from), unsignedp);
1174      return;
1175    }
1176
1177  if (from_mode == DImode && to_mode == HImode)
1178    {
1179#ifdef HAVE_truncdihi2
1180      if (HAVE_truncdihi2)
1181	{
1182	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1183	  return;
1184	}
1185#endif
1186      convert_move (to, force_reg (from_mode, from), unsignedp);
1187      return;
1188    }
1189
1190  if (from_mode == DImode && to_mode == QImode)
1191    {
1192#ifdef HAVE_truncdiqi2
1193      if (HAVE_truncdiqi2)
1194	{
1195	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1196	  return;
1197	}
1198#endif
1199      convert_move (to, force_reg (from_mode, from), unsignedp);
1200      return;
1201    }
1202
1203  if (from_mode == SImode && to_mode == HImode)
1204    {
1205#ifdef HAVE_truncsihi2
1206      if (HAVE_truncsihi2)
1207	{
1208	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1209	  return;
1210	}
1211#endif
1212      convert_move (to, force_reg (from_mode, from), unsignedp);
1213      return;
1214    }
1215
1216  if (from_mode == SImode && to_mode == QImode)
1217    {
1218#ifdef HAVE_truncsiqi2
1219      if (HAVE_truncsiqi2)
1220	{
1221	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1222	  return;
1223	}
1224#endif
1225      convert_move (to, force_reg (from_mode, from), unsignedp);
1226      return;
1227    }
1228
1229  if (from_mode == HImode && to_mode == QImode)
1230    {
1231#ifdef HAVE_trunchiqi2
1232      if (HAVE_trunchiqi2)
1233	{
1234	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1235	  return;
1236	}
1237#endif
1238      convert_move (to, force_reg (from_mode, from), unsignedp);
1239      return;
1240    }
1241
1242  if (from_mode == TImode && to_mode == DImode)
1243    {
1244#ifdef HAVE_trunctidi2
1245      if (HAVE_trunctidi2)
1246	{
1247	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1248	  return;
1249	}
1250#endif
1251      convert_move (to, force_reg (from_mode, from), unsignedp);
1252      return;
1253    }
1254
1255  if (from_mode == TImode && to_mode == SImode)
1256    {
1257#ifdef HAVE_trunctisi2
1258      if (HAVE_trunctisi2)
1259	{
1260	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1261	  return;
1262	}
1263#endif
1264      convert_move (to, force_reg (from_mode, from), unsignedp);
1265      return;
1266    }
1267
1268  if (from_mode == TImode && to_mode == HImode)
1269    {
1270#ifdef HAVE_trunctihi2
1271      if (HAVE_trunctihi2)
1272	{
1273	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1274	  return;
1275	}
1276#endif
1277      convert_move (to, force_reg (from_mode, from), unsignedp);
1278      return;
1279    }
1280
1281  if (from_mode == TImode && to_mode == QImode)
1282    {
1283#ifdef HAVE_trunctiqi2
1284      if (HAVE_trunctiqi2)
1285	{
1286	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1287	  return;
1288	}
1289#endif
1290      convert_move (to, force_reg (from_mode, from), unsignedp);
1291      return;
1292    }
1293
1294  /* Handle truncation of volatile memrefs, and so on;
1295     the things that couldn't be truncated directly,
1296     and for which there was no special instruction.  */
1297  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1298    {
1299      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1300      emit_move_insn (to, temp);
1301      return;
1302    }
1303
1304  /* Mode combination is not recognized.  */
1305  abort ();
1306}
1307
1308/* Return an rtx for a value that would result
1309   from converting X to mode MODE.
1310   Both X and MODE may be floating, or both integer.
1311   UNSIGNEDP is nonzero if X is an unsigned value.
1312   This can be done by referring to a part of X in place
1313   or by copying to a new temporary with conversion.
1314
1315   This function *must not* call protect_from_queue
1316   except when putting X into an insn (in which case convert_move does it).  */
1317
1318rtx
1319convert_to_mode (mode, x, unsignedp)
1320     enum machine_mode mode;
1321     rtx x;
1322     int unsignedp;
1323{
1324  return convert_modes (mode, VOIDmode, x, unsignedp);
1325}
1326
1327/* Return an rtx for a value that would result
1328   from converting X from mode OLDMODE to mode MODE.
1329   Both modes may be floating, or both integer.
1330   UNSIGNEDP is nonzero if X is an unsigned value.
1331
1332   This can be done by referring to a part of X in place
1333   or by copying to a new temporary with conversion.
1334
1335   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1336
1337   This function *must not* call protect_from_queue
1338   except when putting X into an insn (in which case convert_move does it).  */
1339
1340rtx
1341convert_modes (mode, oldmode, x, unsignedp)
1342     enum machine_mode mode, oldmode;
1343     rtx x;
1344     int unsignedp;
1345{
1346  rtx temp;
1347
1348  /* If FROM is a SUBREG that indicates that we have already done at least
1349     the required extension, strip it.  */
1350
1351  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1352      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1353      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1354    x = gen_lowpart (mode, x);
1355
1356  if (GET_MODE (x) != VOIDmode)
1357    oldmode = GET_MODE (x);
1358
1359  if (mode == oldmode)
1360    return x;
1361
1362  /* There is one case that we must handle specially: If we are converting
1363     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1364     we are to interpret the constant as unsigned, gen_lowpart will do
1365     the wrong if the constant appears negative.  What we want to do is
1366     make the high-order word of the constant zero, not all ones.  */
1367
1368  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1369      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1370      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1371    {
1372      HOST_WIDE_INT val = INTVAL (x);
1373
1374      if (oldmode != VOIDmode
1375	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1376	{
1377	  int width = GET_MODE_BITSIZE (oldmode);
1378
1379	  /* We need to zero extend VAL.  */
1380	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1381	}
1382
1383      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1384    }
1385
1386  /* We can do this with a gen_lowpart if both desired and current modes
1387     are integer, and this is either a constant integer, a register, or a
1388     non-volatile MEM.  Except for the constant case where MODE is no
1389     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1390
1391  if ((GET_CODE (x) == CONST_INT
1392       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1393      || (GET_MODE_CLASS (mode) == MODE_INT
1394	  && GET_MODE_CLASS (oldmode) == MODE_INT
1395	  && (GET_CODE (x) == CONST_DOUBLE
1396	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1397		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1398		       && direct_load[(int) mode])
1399		      || (GET_CODE (x) == REG
1400			  && (! HARD_REGISTER_P (x)
1401			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
1402			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1403						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1404    {
1405      /* ?? If we don't know OLDMODE, we have to assume here that
1406	 X does not need sign- or zero-extension.   This may not be
1407	 the case, but it's the best we can do.  */
1408      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1409	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1410	{
1411	  HOST_WIDE_INT val = INTVAL (x);
1412	  int width = GET_MODE_BITSIZE (oldmode);
1413
1414	  /* We must sign or zero-extend in this case.  Start by
1415	     zero-extending, then sign extend if we need to.  */
1416	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1417	  if (! unsignedp
1418	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1419	    val |= (HOST_WIDE_INT) (-1) << width;
1420
1421	  return gen_int_mode (val, mode);
1422	}
1423
1424      return gen_lowpart (mode, x);
1425    }
1426
1427  temp = gen_reg_rtx (mode);
1428  convert_move (temp, x, unsignedp);
1429  return temp;
1430}
1431
1432/* This macro is used to determine what the largest unit size that
1433   move_by_pieces can use is.  */
1434
1435/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1436   move efficiently, as opposed to  MOVE_MAX which is the maximum
1437   number of bytes we can move with a single instruction.  */
1438
1439#ifndef MOVE_MAX_PIECES
1440#define MOVE_MAX_PIECES   MOVE_MAX
1441#endif
1442
1443/* STORE_MAX_PIECES is the number of bytes at a time that we can
1444   store efficiently.  Due to internal GCC limitations, this is
1445   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1446   for an immediate constant.  */
1447
1448#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1449
1450/* Generate several move instructions to copy LEN bytes from block FROM to
1451   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1452   and TO through protect_from_queue before calling.
1453
1454   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1455   used to push FROM to the stack.
1456
1457   ALIGN is maximum alignment we can assume.  */
1458
1459void
1460move_by_pieces (to, from, len, align)
1461     rtx to, from;
1462     unsigned HOST_WIDE_INT len;
1463     unsigned int align;
1464{
1465  struct move_by_pieces data;
1466  rtx to_addr, from_addr = XEXP (from, 0);
1467  unsigned int max_size = MOVE_MAX_PIECES + 1;
1468  enum machine_mode mode = VOIDmode, tmode;
1469  enum insn_code icode;
1470
1471  data.offset = 0;
1472  data.from_addr = from_addr;
1473  if (to)
1474    {
1475      to_addr = XEXP (to, 0);
1476      data.to = to;
1477      data.autinc_to
1478	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1479	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1480      data.reverse
1481	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1482    }
1483  else
1484    {
1485      to_addr = NULL_RTX;
1486      data.to = NULL_RTX;
1487      data.autinc_to = 1;
1488#ifdef STACK_GROWS_DOWNWARD
1489      data.reverse = 1;
1490#else
1491      data.reverse = 0;
1492#endif
1493    }
1494  data.to_addr = to_addr;
1495  data.from = from;
1496  data.autinc_from
1497    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1498       || GET_CODE (from_addr) == POST_INC
1499       || GET_CODE (from_addr) == POST_DEC);
1500
1501  data.explicit_inc_from = 0;
1502  data.explicit_inc_to = 0;
1503  if (data.reverse) data.offset = len;
1504  data.len = len;
1505
1506  /* If copying requires more than two move insns,
1507     copy addresses to registers (to make displacements shorter)
1508     and use post-increment if available.  */
1509  if (!(data.autinc_from && data.autinc_to)
1510      && move_by_pieces_ninsns (len, align) > 2)
1511    {
1512      /* Find the mode of the largest move...  */
1513      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1514	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1515	if (GET_MODE_SIZE (tmode) < max_size)
1516	  mode = tmode;
1517
1518      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1519	{
1520	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1521	  data.autinc_from = 1;
1522	  data.explicit_inc_from = -1;
1523	}
1524      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1525	{
1526	  data.from_addr = copy_addr_to_reg (from_addr);
1527	  data.autinc_from = 1;
1528	  data.explicit_inc_from = 1;
1529	}
1530      if (!data.autinc_from && CONSTANT_P (from_addr))
1531	data.from_addr = copy_addr_to_reg (from_addr);
1532      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1533	{
1534	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1535	  data.autinc_to = 1;
1536	  data.explicit_inc_to = -1;
1537	}
1538      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1539	{
1540	  data.to_addr = copy_addr_to_reg (to_addr);
1541	  data.autinc_to = 1;
1542	  data.explicit_inc_to = 1;
1543	}
1544      if (!data.autinc_to && CONSTANT_P (to_addr))
1545	data.to_addr = copy_addr_to_reg (to_addr);
1546    }
1547
1548  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1549      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1550    align = MOVE_MAX * BITS_PER_UNIT;
1551
1552  /* First move what we can in the largest integer mode, then go to
1553     successively smaller modes.  */
1554
1555  while (max_size > 1)
1556    {
1557      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1558	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1559	if (GET_MODE_SIZE (tmode) < max_size)
1560	  mode = tmode;
1561
1562      if (mode == VOIDmode)
1563	break;
1564
1565      icode = mov_optab->handlers[(int) mode].insn_code;
1566      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1567	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1568
1569      max_size = GET_MODE_SIZE (mode);
1570    }
1571
1572  /* The code above should have handled everything.  */
1573  if (data.len > 0)
1574    abort ();
1575}
1576
1577/* Return number of insns required to move L bytes by pieces.
1578   ALIGN (in bits) is maximum alignment we can assume.  */
1579
1580static unsigned HOST_WIDE_INT
1581move_by_pieces_ninsns (l, align)
1582     unsigned HOST_WIDE_INT l;
1583     unsigned int align;
1584{
1585  unsigned HOST_WIDE_INT n_insns = 0;
1586  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1587
1588  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1589      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1590    align = MOVE_MAX * BITS_PER_UNIT;
1591
1592  while (max_size > 1)
1593    {
1594      enum machine_mode mode = VOIDmode, tmode;
1595      enum insn_code icode;
1596
1597      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1598	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1599	if (GET_MODE_SIZE (tmode) < max_size)
1600	  mode = tmode;
1601
1602      if (mode == VOIDmode)
1603	break;
1604
1605      icode = mov_optab->handlers[(int) mode].insn_code;
1606      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1607	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1608
1609      max_size = GET_MODE_SIZE (mode);
1610    }
1611
1612  if (l)
1613    abort ();
1614  return n_insns;
1615}
1616
1617/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1618   with move instructions for mode MODE.  GENFUN is the gen_... function
1619   to make a move insn for that mode.  DATA has all the other info.  */
1620
1621static void
1622move_by_pieces_1 (genfun, mode, data)
1623     rtx (*genfun) PARAMS ((rtx, ...));
1624     enum machine_mode mode;
1625     struct move_by_pieces *data;
1626{
1627  unsigned int size = GET_MODE_SIZE (mode);
1628  rtx to1 = NULL_RTX, from1;
1629
1630  while (data->len >= size)
1631    {
1632      if (data->reverse)
1633	data->offset -= size;
1634
1635      if (data->to)
1636	{
1637	  if (data->autinc_to)
1638	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1639					     data->offset);
1640	  else
1641	    to1 = adjust_address (data->to, mode, data->offset);
1642	}
1643
1644      if (data->autinc_from)
1645	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1646					   data->offset);
1647      else
1648	from1 = adjust_address (data->from, mode, data->offset);
1649
1650      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1651	emit_insn (gen_add2_insn (data->to_addr,
1652				  GEN_INT (-(HOST_WIDE_INT)size)));
1653      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1654	emit_insn (gen_add2_insn (data->from_addr,
1655				  GEN_INT (-(HOST_WIDE_INT)size)));
1656
1657      if (data->to)
1658	emit_insn ((*genfun) (to1, from1));
1659      else
1660	{
1661#ifdef PUSH_ROUNDING
1662	  emit_single_push_insn (mode, from1, NULL);
1663#else
1664	  abort ();
1665#endif
1666	}
1667
1668      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1669	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1670      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1671	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1672
1673      if (! data->reverse)
1674	data->offset += size;
1675
1676      data->len -= size;
1677    }
1678}
1679
1680/* Emit code to move a block Y to a block X.  This may be done with
1681   string-move instructions, with multiple scalar move instructions,
1682   or with a library call.
1683
1684   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1685   SIZE is an rtx that says how long they are.
1686   ALIGN is the maximum alignment we can assume they have.
1687   METHOD describes what kind of copy this is, and what mechanisms may be used.
1688
1689   Return the address of the new block, if memcpy is called and returns it,
1690   0 otherwise.  */
1691
1692rtx
1693emit_block_move (x, y, size, method)
1694     rtx x, y, size;
1695     enum block_op_methods method;
1696{
1697  bool may_use_call;
1698  rtx retval = 0;
1699  unsigned int align;
1700
1701  switch (method)
1702    {
1703    case BLOCK_OP_NORMAL:
1704      may_use_call = true;
1705      break;
1706
1707    case BLOCK_OP_CALL_PARM:
1708      may_use_call = block_move_libcall_safe_for_call_parm ();
1709
1710      /* Make inhibit_defer_pop nonzero around the library call
1711	 to force it to pop the arguments right away.  */
1712      NO_DEFER_POP;
1713      break;
1714
1715    case BLOCK_OP_NO_LIBCALL:
1716      may_use_call = false;
1717      break;
1718
1719    default:
1720      abort ();
1721    }
1722
1723  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1724
1725  if (GET_MODE (x) != BLKmode)
1726    abort ();
1727  if (GET_MODE (y) != BLKmode)
1728    abort ();
1729
1730  x = protect_from_queue (x, 1);
1731  y = protect_from_queue (y, 0);
1732  size = protect_from_queue (size, 0);
1733
1734  if (GET_CODE (x) != MEM)
1735    abort ();
1736  if (GET_CODE (y) != MEM)
1737    abort ();
1738  if (size == 0)
1739    abort ();
1740
1741  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1742     can be incorrect is coming from __builtin_memcpy.  */
1743  if (GET_CODE (size) == CONST_INT)
1744    {
1745      x = shallow_copy_rtx (x);
1746      y = shallow_copy_rtx (y);
1747      set_mem_size (x, size);
1748      set_mem_size (y, size);
1749    }
1750
1751  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1752    move_by_pieces (x, y, INTVAL (size), align);
1753  else if (emit_block_move_via_movstr (x, y, size, align))
1754    ;
1755  else if (may_use_call)
1756    retval = emit_block_move_via_libcall (x, y, size);
1757  else
1758    emit_block_move_via_loop (x, y, size, align);
1759
1760  if (method == BLOCK_OP_CALL_PARM)
1761    OK_DEFER_POP;
1762
1763  return retval;
1764}
1765
1766/* A subroutine of emit_block_move.  Returns true if calling the
1767   block move libcall will not clobber any parameters which may have
1768   already been placed on the stack.  */
1769
1770static bool
1771block_move_libcall_safe_for_call_parm ()
1772{
1773  if (PUSH_ARGS)
1774    return true;
1775  else
1776    {
1777      /* Check to see whether memcpy takes all register arguments.  */
1778      static enum {
1779	takes_regs_uninit, takes_regs_no, takes_regs_yes
1780      } takes_regs = takes_regs_uninit;
1781
1782      switch (takes_regs)
1783	{
1784	case takes_regs_uninit:
1785	  {
1786	    CUMULATIVE_ARGS args_so_far;
1787	    tree fn, arg;
1788
1789	    fn = emit_block_move_libcall_fn (false);
1790	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1791
1792	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1793	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1794	      {
1795		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1796		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1797		if (!tmp || !REG_P (tmp))
1798		  goto fail_takes_regs;
1799#ifdef FUNCTION_ARG_PARTIAL_NREGS
1800		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1801						NULL_TREE, 1))
1802		  goto fail_takes_regs;
1803#endif
1804		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1805	      }
1806	  }
1807	  takes_regs = takes_regs_yes;
1808	  /* FALLTHRU */
1809
1810	case takes_regs_yes:
1811	  return true;
1812
1813	fail_takes_regs:
1814	  takes_regs = takes_regs_no;
1815	  /* FALLTHRU */
1816	case takes_regs_no:
1817	  return false;
1818
1819	default:
1820	  abort ();
1821	}
1822    }
1823}
1824
1825/* A subroutine of emit_block_move.  Expand a movstr pattern;
1826   return true if successful.  */
1827
1828static bool
1829emit_block_move_via_movstr (x, y, size, align)
1830     rtx x, y, size;
1831     unsigned int align;
1832{
1833  /* Try the most limited insn first, because there's no point
1834     including more than one in the machine description unless
1835     the more limited one has some advantage.  */
1836
1837  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1838  enum machine_mode mode;
1839
1840  /* Since this is a move insn, we don't care about volatility.  */
1841  volatile_ok = 1;
1842
1843  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1844       mode = GET_MODE_WIDER_MODE (mode))
1845    {
1846      enum insn_code code = movstr_optab[(int) mode];
1847      insn_operand_predicate_fn pred;
1848
1849      if (code != CODE_FOR_nothing
1850	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1851	     here because if SIZE is less than the mode mask, as it is
1852	     returned by the macro, it will definitely be less than the
1853	     actual mode mask.  */
1854	  && ((GET_CODE (size) == CONST_INT
1855	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1856		   <= (GET_MODE_MASK (mode) >> 1)))
1857	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1858	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1859	      || (*pred) (x, BLKmode))
1860	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1861	      || (*pred) (y, BLKmode))
1862	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1863	      || (*pred) (opalign, VOIDmode)))
1864	{
1865	  rtx op2;
1866	  rtx last = get_last_insn ();
1867	  rtx pat;
1868
1869	  op2 = convert_to_mode (mode, size, 1);
1870	  pred = insn_data[(int) code].operand[2].predicate;
1871	  if (pred != 0 && ! (*pred) (op2, mode))
1872	    op2 = copy_to_mode_reg (mode, op2);
1873
1874	  /* ??? When called via emit_block_move_for_call, it'd be
1875	     nice if there were some way to inform the backend, so
1876	     that it doesn't fail the expansion because it thinks
1877	     emitting the libcall would be more efficient.  */
1878
1879	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1880	  if (pat)
1881	    {
1882	      emit_insn (pat);
1883	      volatile_ok = 0;
1884	      return true;
1885	    }
1886	  else
1887	    delete_insns_since (last);
1888	}
1889    }
1890
1891  volatile_ok = 0;
1892  return false;
1893}
1894
1895/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
1896   Return the return value from memcpy, 0 otherwise.  */
1897
1898static rtx
1899emit_block_move_via_libcall (dst, src, size)
1900     rtx dst, src, size;
1901{
1902  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1903  enum machine_mode size_mode;
1904  rtx retval;
1905
1906  /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1907
1908     It is unsafe to save the value generated by protect_from_queue
1909     and reuse it later.  Consider what happens if emit_queue is
1910     called before the return value from protect_from_queue is used.
1911
1912     Expansion of the CALL_EXPR below will call emit_queue before
1913     we are finished emitting RTL for argument setup.  So if we are
1914     not careful we could get the wrong value for an argument.
1915
1916     To avoid this problem we go ahead and emit code to copy X, Y &
1917     SIZE into new pseudos.  We can then place those new pseudos
1918     into an RTL_EXPR and use them later, even after a call to
1919     emit_queue.
1920
1921     Note this is not strictly needed for library calls since they
1922     do not call emit_queue before loading their arguments.  However,
1923     we may need to have library calls call emit_queue in the future
1924     since failing to do so could cause problems for targets which
1925     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1926
1927  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1928  src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1929
1930  if (TARGET_MEM_FUNCTIONS)
1931    size_mode = TYPE_MODE (sizetype);
1932  else
1933    size_mode = TYPE_MODE (unsigned_type_node);
1934  size = convert_to_mode (size_mode, size, 1);
1935  size = copy_to_mode_reg (size_mode, size);
1936
1937  /* It is incorrect to use the libcall calling conventions to call
1938     memcpy in this context.  This could be a user call to memcpy and
1939     the user may wish to examine the return value from memcpy.  For
1940     targets where libcalls and normal calls have different conventions
1941     for returning pointers, we could end up generating incorrect code.
1942
1943     For convenience, we generate the call to bcopy this way as well.  */
1944
1945  dst_tree = make_tree (ptr_type_node, dst);
1946  src_tree = make_tree (ptr_type_node, src);
1947  if (TARGET_MEM_FUNCTIONS)
1948    size_tree = make_tree (sizetype, size);
1949  else
1950    size_tree = make_tree (unsigned_type_node, size);
1951
1952  fn = emit_block_move_libcall_fn (true);
1953  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1954  if (TARGET_MEM_FUNCTIONS)
1955    {
1956      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1957      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1958    }
1959  else
1960    {
1961      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1962      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1963    }
1964
1965  /* Now we have to build up the CALL_EXPR itself.  */
1966  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1967  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1968		     call_expr, arg_list, NULL_TREE);
1969  TREE_SIDE_EFFECTS (call_expr) = 1;
1970
1971  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1972
1973  /* If we are initializing a readonly value, show the above call
1974     clobbered it.  Otherwise, a load from it may erroneously be
1975     hoisted from a loop.  */
1976  if (RTX_UNCHANGING_P (dst))
1977    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1978
1979  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1980}
1981
1982/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1983   for the function we use for block copies.  The first time FOR_CALL
1984   is true, we call assemble_external.  */
1985
1986static GTY(()) tree block_move_fn;
1987
1988static tree
1989emit_block_move_libcall_fn (for_call)
1990      int for_call;
1991{
1992  static bool emitted_extern;
1993  tree fn = block_move_fn, args;
1994
1995  if (!fn)
1996    {
1997      if (TARGET_MEM_FUNCTIONS)
1998	{
1999	  fn = get_identifier ("memcpy");
2000	  args = build_function_type_list (ptr_type_node, ptr_type_node,
2001					   const_ptr_type_node, sizetype,
2002					   NULL_TREE);
2003	}
2004      else
2005	{
2006	  fn = get_identifier ("bcopy");
2007	  args = build_function_type_list (void_type_node, const_ptr_type_node,
2008					   ptr_type_node, unsigned_type_node,
2009					   NULL_TREE);
2010	}
2011
2012      fn = build_decl (FUNCTION_DECL, fn, args);
2013      DECL_EXTERNAL (fn) = 1;
2014      TREE_PUBLIC (fn) = 1;
2015      DECL_ARTIFICIAL (fn) = 1;
2016      TREE_NOTHROW (fn) = 1;
2017
2018      block_move_fn = fn;
2019    }
2020
2021  if (for_call && !emitted_extern)
2022    {
2023      emitted_extern = true;
2024      make_decl_rtl (fn, NULL);
2025      assemble_external (fn);
2026    }
2027
2028  return fn;
2029}
2030
2031/* A subroutine of emit_block_move.  Copy the data via an explicit
2032   loop.  This is used only when libcalls are forbidden.  */
2033/* ??? It'd be nice to copy in hunks larger than QImode.  */
2034
2035static void
2036emit_block_move_via_loop (x, y, size, align)
2037     rtx x, y, size;
2038     unsigned int align ATTRIBUTE_UNUSED;
2039{
2040  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2041  enum machine_mode iter_mode;
2042
2043  iter_mode = GET_MODE (size);
2044  if (iter_mode == VOIDmode)
2045    iter_mode = word_mode;
2046
2047  top_label = gen_label_rtx ();
2048  cmp_label = gen_label_rtx ();
2049  iter = gen_reg_rtx (iter_mode);
2050
2051  emit_move_insn (iter, const0_rtx);
2052
2053  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2054  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2055  do_pending_stack_adjust ();
2056
2057  emit_note (NULL, NOTE_INSN_LOOP_BEG);
2058
2059  emit_jump (cmp_label);
2060  emit_label (top_label);
2061
2062  tmp = convert_modes (Pmode, iter_mode, iter, true);
2063  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2064  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2065  x = change_address (x, QImode, x_addr);
2066  y = change_address (y, QImode, y_addr);
2067
2068  emit_move_insn (x, y);
2069
2070  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2071			     true, OPTAB_LIB_WIDEN);
2072  if (tmp != iter)
2073    emit_move_insn (iter, tmp);
2074
2075  emit_note (NULL, NOTE_INSN_LOOP_CONT);
2076  emit_label (cmp_label);
2077
2078  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2079			   true, top_label);
2080
2081  emit_note (NULL, NOTE_INSN_LOOP_END);
2082}
2083
2084/* Copy all or part of a value X into registers starting at REGNO.
2085   The number of registers to be filled is NREGS.  */
2086
2087void
2088move_block_to_reg (regno, x, nregs, mode)
2089     int regno;
2090     rtx x;
2091     int nregs;
2092     enum machine_mode mode;
2093{
2094  int i;
2095#ifdef HAVE_load_multiple
2096  rtx pat;
2097  rtx last;
2098#endif
2099
2100  if (nregs == 0)
2101    return;
2102
2103  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2104    x = validize_mem (force_const_mem (mode, x));
2105
2106  /* See if the machine can do this with a load multiple insn.  */
2107#ifdef HAVE_load_multiple
2108  if (HAVE_load_multiple)
2109    {
2110      last = get_last_insn ();
2111      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2112			       GEN_INT (nregs));
2113      if (pat)
2114	{
2115	  emit_insn (pat);
2116	  return;
2117	}
2118      else
2119	delete_insns_since (last);
2120    }
2121#endif
2122
2123  for (i = 0; i < nregs; i++)
2124    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2125		    operand_subword_force (x, i, mode));
2126}
2127
2128/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2129   The number of registers to be filled is NREGS.  SIZE indicates the number
2130   of bytes in the object X.  */
2131
2132void
2133move_block_from_reg (regno, x, nregs, size)
2134     int regno;
2135     rtx x;
2136     int nregs;
2137     int size;
2138{
2139  int i;
2140#ifdef HAVE_store_multiple
2141  rtx pat;
2142  rtx last;
2143#endif
2144  enum machine_mode mode;
2145
2146  if (nregs == 0)
2147    return;
2148
2149  /* If SIZE is that of a mode no bigger than a word, just use that
2150     mode's store operation.  */
2151  if (size <= UNITS_PER_WORD
2152      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2153    {
2154      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2155      return;
2156    }
2157
2158  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2159     to the left before storing to memory.  Note that the previous test
2160     doesn't handle all cases (e.g. SIZE == 3).  */
2161  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2162    {
2163      rtx tem = operand_subword (x, 0, 1, BLKmode);
2164      rtx shift;
2165
2166      if (tem == 0)
2167	abort ();
2168
2169      shift = expand_shift (LSHIFT_EXPR, word_mode,
2170			    gen_rtx_REG (word_mode, regno),
2171			    build_int_2 ((UNITS_PER_WORD - size)
2172					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2173      emit_move_insn (tem, shift);
2174      return;
2175    }
2176
2177  /* See if the machine can do this with a store multiple insn.  */
2178#ifdef HAVE_store_multiple
2179  if (HAVE_store_multiple)
2180    {
2181      last = get_last_insn ();
2182      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2183				GEN_INT (nregs));
2184      if (pat)
2185	{
2186	  emit_insn (pat);
2187	  return;
2188	}
2189      else
2190	delete_insns_since (last);
2191    }
2192#endif
2193
2194  for (i = 0; i < nregs; i++)
2195    {
2196      rtx tem = operand_subword (x, i, 1, BLKmode);
2197
2198      if (tem == 0)
2199	abort ();
2200
2201      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2202    }
2203}
2204
2205/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2206   ORIG, where ORIG is a non-consecutive group of registers represented by
2207   a PARALLEL.  The clone is identical to the original except in that the
2208   original set of registers is replaced by a new set of pseudo registers.
2209   The new set has the same modes as the original set.  */
2210
2211rtx
2212gen_group_rtx (orig)
2213     rtx orig;
2214{
2215  int i, length;
2216  rtx *tmps;
2217
2218  if (GET_CODE (orig) != PARALLEL)
2219    abort ();
2220
2221  length = XVECLEN (orig, 0);
2222  tmps = (rtx *) alloca (sizeof (rtx) * length);
2223
2224  /* Skip a NULL entry in first slot.  */
2225  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2226
2227  if (i)
2228    tmps[0] = 0;
2229
2230  for (; i < length; i++)
2231    {
2232      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2233      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2234
2235      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2236    }
2237
2238  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2239}
2240
2241/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2242   registers represented by a PARALLEL.  SSIZE represents the total size of
2243   block SRC in bytes, or -1 if not known.  */
2244/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2245   the balance will be in what would be the low-order memory addresses, i.e.
2246   left justified for big endian, right justified for little endian.  This
2247   happens to be true for the targets currently using this support.  If this
2248   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2249   would be needed.  */
2250
2251void
2252emit_group_load (dst, orig_src, ssize)
2253     rtx dst, orig_src;
2254     int ssize;
2255{
2256  rtx *tmps, src;
2257  int start, i;
2258
2259  if (GET_CODE (dst) != PARALLEL)
2260    abort ();
2261
2262  /* Check for a NULL entry, used to indicate that the parameter goes
2263     both on the stack and in registers.  */
2264  if (XEXP (XVECEXP (dst, 0, 0), 0))
2265    start = 0;
2266  else
2267    start = 1;
2268
2269  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2270
2271  /* Process the pieces.  */
2272  for (i = start; i < XVECLEN (dst, 0); i++)
2273    {
2274      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2275      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2276      unsigned int bytelen = GET_MODE_SIZE (mode);
2277      int shift = 0;
2278
2279      /* Handle trailing fragments that run over the size of the struct.  */
2280      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2281	{
2282	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2283	  bytelen = ssize - bytepos;
2284	  if (bytelen <= 0)
2285	    abort ();
2286	}
2287
2288      /* If we won't be loading directly from memory, protect the real source
2289	 from strange tricks we might play; but make sure that the source can
2290	 be loaded directly into the destination.  */
2291      src = orig_src;
2292      if (GET_CODE (orig_src) != MEM
2293	  && (!CONSTANT_P (orig_src)
2294	      || (GET_MODE (orig_src) != mode
2295		  && GET_MODE (orig_src) != VOIDmode)))
2296	{
2297	  if (GET_MODE (orig_src) == VOIDmode)
2298	    src = gen_reg_rtx (mode);
2299	  else
2300	    src = gen_reg_rtx (GET_MODE (orig_src));
2301
2302	  emit_move_insn (src, orig_src);
2303	}
2304
2305      /* Optimize the access just a bit.  */
2306      if (GET_CODE (src) == MEM
2307	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2308	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2309	  && bytelen == GET_MODE_SIZE (mode))
2310	{
2311	  tmps[i] = gen_reg_rtx (mode);
2312	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2313	}
2314      else if (GET_CODE (src) == CONCAT)
2315	{
2316	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2317	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2318
2319	  if ((bytepos == 0 && bytelen == slen0)
2320	      || (bytepos != 0 && bytepos + bytelen <= slen))
2321	    {
2322	      /* The following assumes that the concatenated objects all
2323		 have the same size.  In this case, a simple calculation
2324		 can be used to determine the object and the bit field
2325		 to be extracted.  */
2326	      tmps[i] = XEXP (src, bytepos / slen0);
2327	      if (! CONSTANT_P (tmps[i])
2328		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2329		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2330					     (bytepos % slen0) * BITS_PER_UNIT,
2331					     1, NULL_RTX, mode, mode, ssize);
2332	    }
2333	  else if (bytepos == 0)
2334	    {
2335	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2336	      emit_move_insn (mem, src);
2337	      tmps[i] = adjust_address (mem, mode, 0);
2338	    }
2339	  else
2340	    abort ();
2341	}
2342      else if (CONSTANT_P (src)
2343	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2344	tmps[i] = src;
2345      else
2346	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2347				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2348				     mode, mode, ssize);
2349
2350      if (BYTES_BIG_ENDIAN && shift)
2351	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2352		      tmps[i], 0, OPTAB_WIDEN);
2353    }
2354
2355  emit_queue ();
2356
2357  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2358  for (i = start; i < XVECLEN (dst, 0); i++)
2359    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2360}
2361
2362/* Emit code to move a block SRC to block DST, where SRC and DST are
2363   non-consecutive groups of registers, each represented by a PARALLEL.  */
2364
2365void
2366emit_group_move (dst, src)
2367     rtx dst, src;
2368{
2369  int i;
2370
2371  if (GET_CODE (src) != PARALLEL
2372      || GET_CODE (dst) != PARALLEL
2373      || XVECLEN (src, 0) != XVECLEN (dst, 0))
2374    abort ();
2375
2376  /* Skip first entry if NULL.  */
2377  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2378    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2379		    XEXP (XVECEXP (src, 0, i), 0));
2380}
2381
2382/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2383   registers represented by a PARALLEL.  SSIZE represents the total size of
2384   block DST, or -1 if not known.  */
2385
2386void
2387emit_group_store (orig_dst, src, ssize)
2388     rtx orig_dst, src;
2389     int ssize;
2390{
2391  rtx *tmps, dst;
2392  int start, i;
2393
2394  if (GET_CODE (src) != PARALLEL)
2395    abort ();
2396
2397  /* Check for a NULL entry, used to indicate that the parameter goes
2398     both on the stack and in registers.  */
2399  if (XEXP (XVECEXP (src, 0, 0), 0))
2400    start = 0;
2401  else
2402    start = 1;
2403
2404  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2405
2406  /* Copy the (probable) hard regs into pseudos.  */
2407  for (i = start; i < XVECLEN (src, 0); i++)
2408    {
2409      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2410      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2411      emit_move_insn (tmps[i], reg);
2412    }
2413  emit_queue ();
2414
2415  /* If we won't be storing directly into memory, protect the real destination
2416     from strange tricks we might play.  */
2417  dst = orig_dst;
2418  if (GET_CODE (dst) == PARALLEL)
2419    {
2420      rtx temp;
2421
2422      /* We can get a PARALLEL dst if there is a conditional expression in
2423	 a return statement.  In that case, the dst and src are the same,
2424	 so no action is necessary.  */
2425      if (rtx_equal_p (dst, src))
2426	return;
2427
2428      /* It is unclear if we can ever reach here, but we may as well handle
2429	 it.  Allocate a temporary, and split this into a store/load to/from
2430	 the temporary.  */
2431
2432      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2433      emit_group_store (temp, src, ssize);
2434      emit_group_load (dst, temp, ssize);
2435      return;
2436    }
2437  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2438    {
2439      dst = gen_reg_rtx (GET_MODE (orig_dst));
2440      /* Make life a bit easier for combine.  */
2441      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2442    }
2443
2444  /* Process the pieces.  */
2445  for (i = start; i < XVECLEN (src, 0); i++)
2446    {
2447      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2448      enum machine_mode mode = GET_MODE (tmps[i]);
2449      unsigned int bytelen = GET_MODE_SIZE (mode);
2450      rtx dest = dst;
2451
2452      /* Handle trailing fragments that run over the size of the struct.  */
2453      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2454	{
2455	  if (BYTES_BIG_ENDIAN)
2456	    {
2457	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2458	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2459			    tmps[i], 0, OPTAB_WIDEN);
2460	    }
2461	  bytelen = ssize - bytepos;
2462	}
2463
2464      if (GET_CODE (dst) == CONCAT)
2465	{
2466	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2467	    dest = XEXP (dst, 0);
2468	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2469	    {
2470	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2471	      dest = XEXP (dst, 1);
2472	    }
2473	  else if (bytepos == 0 && XVECLEN (src, 0))
2474	    {
2475	      dest = assign_stack_temp (GET_MODE (dest),
2476				        GET_MODE_SIZE (GET_MODE (dest)), 0);
2477	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2478			      tmps[i]);
2479	      dst = dest;
2480	      break;
2481	    }
2482	  else
2483	    abort ();
2484	}
2485
2486      /* Optimize the access just a bit.  */
2487      if (GET_CODE (dest) == MEM
2488	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2489	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2490	  && bytelen == GET_MODE_SIZE (mode))
2491	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2492      else
2493	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2494			 mode, tmps[i], ssize);
2495    }
2496
2497  emit_queue ();
2498
2499  /* Copy from the pseudo into the (probable) hard reg.  */
2500  if (orig_dst != dst)
2501    emit_move_insn (orig_dst, dst);
2502}
2503
2504/* Generate code to copy a BLKmode object of TYPE out of a
2505   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2506   is null, a stack temporary is created.  TGTBLK is returned.
2507
2508   The primary purpose of this routine is to handle functions
2509   that return BLKmode structures in registers.  Some machines
2510   (the PA for example) want to return all small structures
2511   in registers regardless of the structure's alignment.  */
2512
2513rtx
2514copy_blkmode_from_reg (tgtblk, srcreg, type)
2515     rtx tgtblk;
2516     rtx srcreg;
2517     tree type;
2518{
2519  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2520  rtx src = NULL, dst = NULL;
2521  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2522  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2523
2524  if (tgtblk == 0)
2525    {
2526      tgtblk = assign_temp (build_qualified_type (type,
2527						  (TYPE_QUALS (type)
2528						   | TYPE_QUAL_CONST)),
2529			    0, 1, 1);
2530      preserve_temp_slots (tgtblk);
2531    }
2532
2533  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2534     into a new pseudo which is a full word.  */
2535
2536  if (GET_MODE (srcreg) != BLKmode
2537      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2538    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2539
2540  /* Structures whose size is not a multiple of a word are aligned
2541     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2542     machine, this means we must skip the empty high order bytes when
2543     calculating the bit offset.  */
2544  if (BYTES_BIG_ENDIAN
2545      && bytes % UNITS_PER_WORD)
2546    big_endian_correction
2547      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2548
2549  /* Copy the structure BITSIZE bites at a time.
2550
2551     We could probably emit more efficient code for machines which do not use
2552     strict alignment, but it doesn't seem worth the effort at the current
2553     time.  */
2554  for (bitpos = 0, xbitpos = big_endian_correction;
2555       bitpos < bytes * BITS_PER_UNIT;
2556       bitpos += bitsize, xbitpos += bitsize)
2557    {
2558      /* We need a new source operand each time xbitpos is on a
2559	 word boundary and when xbitpos == big_endian_correction
2560	 (the first time through).  */
2561      if (xbitpos % BITS_PER_WORD == 0
2562	  || xbitpos == big_endian_correction)
2563	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2564				     GET_MODE (srcreg));
2565
2566      /* We need a new destination operand each time bitpos is on
2567	 a word boundary.  */
2568      if (bitpos % BITS_PER_WORD == 0)
2569	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2570
2571      /* Use xbitpos for the source extraction (right justified) and
2572	 xbitpos for the destination store (left justified).  */
2573      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2574		       extract_bit_field (src, bitsize,
2575					  xbitpos % BITS_PER_WORD, 1,
2576					  NULL_RTX, word_mode, word_mode,
2577					  BITS_PER_WORD),
2578		       BITS_PER_WORD);
2579    }
2580
2581  return tgtblk;
2582}
2583
2584/* Add a USE expression for REG to the (possibly empty) list pointed
2585   to by CALL_FUSAGE.  REG must denote a hard register.  */
2586
2587void
2588use_reg (call_fusage, reg)
2589     rtx *call_fusage, reg;
2590{
2591  if (GET_CODE (reg) != REG
2592      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2593    abort ();
2594
2595  *call_fusage
2596    = gen_rtx_EXPR_LIST (VOIDmode,
2597			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2598}
2599
2600/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2601   starting at REGNO.  All of these registers must be hard registers.  */
2602
2603void
2604use_regs (call_fusage, regno, nregs)
2605     rtx *call_fusage;
2606     int regno;
2607     int nregs;
2608{
2609  int i;
2610
2611  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2612    abort ();
2613
2614  for (i = 0; i < nregs; i++)
2615    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2616}
2617
2618/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2619   PARALLEL REGS.  This is for calls that pass values in multiple
2620   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2621
2622void
2623use_group_regs (call_fusage, regs)
2624     rtx *call_fusage;
2625     rtx regs;
2626{
2627  int i;
2628
2629  for (i = 0; i < XVECLEN (regs, 0); i++)
2630    {
2631      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2632
2633      /* A NULL entry means the parameter goes both on the stack and in
2634	 registers.  This can also be a MEM for targets that pass values
2635	 partially on the stack and partially in registers.  */
2636      if (reg != 0 && GET_CODE (reg) == REG)
2637	use_reg (call_fusage, reg);
2638    }
2639}
2640
2641
2642/* Determine whether the LEN bytes generated by CONSTFUN can be
2643   stored to memory using several move instructions.  CONSTFUNDATA is
2644   a pointer which will be passed as argument in every CONSTFUN call.
2645   ALIGN is maximum alignment we can assume.  Return nonzero if a
2646   call to store_by_pieces should succeed.  */
2647
2648int
2649can_store_by_pieces (len, constfun, constfundata, align)
2650     unsigned HOST_WIDE_INT len;
2651     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2652     PTR constfundata;
2653     unsigned int align;
2654{
2655  unsigned HOST_WIDE_INT max_size, l;
2656  HOST_WIDE_INT offset = 0;
2657  enum machine_mode mode, tmode;
2658  enum insn_code icode;
2659  int reverse;
2660  rtx cst;
2661
2662  if (! MOVE_BY_PIECES_P (len, align))
2663    return 0;
2664
2665  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2666      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2667    align = MOVE_MAX * BITS_PER_UNIT;
2668
2669  /* We would first store what we can in the largest integer mode, then go to
2670     successively smaller modes.  */
2671
2672  for (reverse = 0;
2673       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2674       reverse++)
2675    {
2676      l = len;
2677      mode = VOIDmode;
2678      max_size = STORE_MAX_PIECES + 1;
2679      while (max_size > 1)
2680	{
2681	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2682	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2683	    if (GET_MODE_SIZE (tmode) < max_size)
2684	      mode = tmode;
2685
2686	  if (mode == VOIDmode)
2687	    break;
2688
2689	  icode = mov_optab->handlers[(int) mode].insn_code;
2690	  if (icode != CODE_FOR_nothing
2691	      && align >= GET_MODE_ALIGNMENT (mode))
2692	    {
2693	      unsigned int size = GET_MODE_SIZE (mode);
2694
2695	      while (l >= size)
2696		{
2697		  if (reverse)
2698		    offset -= size;
2699
2700		  cst = (*constfun) (constfundata, offset, mode);
2701		  if (!LEGITIMATE_CONSTANT_P (cst))
2702		    return 0;
2703
2704		  if (!reverse)
2705		    offset += size;
2706
2707		  l -= size;
2708		}
2709	    }
2710
2711	  max_size = GET_MODE_SIZE (mode);
2712	}
2713
2714      /* The code above should have handled everything.  */
2715      if (l != 0)
2716	abort ();
2717    }
2718
2719  return 1;
2720}
2721
2722/* Generate several move instructions to store LEN bytes generated by
2723   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2724   pointer which will be passed as argument in every CONSTFUN call.
2725   ALIGN is maximum alignment we can assume.  */
2726
2727void
2728store_by_pieces (to, len, constfun, constfundata, align)
2729     rtx to;
2730     unsigned HOST_WIDE_INT len;
2731     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2732     PTR constfundata;
2733     unsigned int align;
2734{
2735  struct store_by_pieces data;
2736
2737  if (! MOVE_BY_PIECES_P (len, align))
2738    abort ();
2739  to = protect_from_queue (to, 1);
2740  data.constfun = constfun;
2741  data.constfundata = constfundata;
2742  data.len = len;
2743  data.to = to;
2744  store_by_pieces_1 (&data, align);
2745}
2746
2747/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2748   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2749   before calling. ALIGN is maximum alignment we can assume.  */
2750
2751static void
2752clear_by_pieces (to, len, align)
2753     rtx to;
2754     unsigned HOST_WIDE_INT len;
2755     unsigned int align;
2756{
2757  struct store_by_pieces data;
2758
2759  data.constfun = clear_by_pieces_1;
2760  data.constfundata = NULL;
2761  data.len = len;
2762  data.to = to;
2763  store_by_pieces_1 (&data, align);
2764}
2765
2766/* Callback routine for clear_by_pieces.
2767   Return const0_rtx unconditionally.  */
2768
2769static rtx
2770clear_by_pieces_1 (data, offset, mode)
2771     PTR data ATTRIBUTE_UNUSED;
2772     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2773     enum machine_mode mode ATTRIBUTE_UNUSED;
2774{
2775  return const0_rtx;
2776}
2777
2778/* Subroutine of clear_by_pieces and store_by_pieces.
2779   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2780   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2781   before calling.  ALIGN is maximum alignment we can assume.  */
2782
2783static void
2784store_by_pieces_1 (data, align)
2785     struct store_by_pieces *data;
2786     unsigned int align;
2787{
2788  rtx to_addr = XEXP (data->to, 0);
2789  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2790  enum machine_mode mode = VOIDmode, tmode;
2791  enum insn_code icode;
2792
2793  data->offset = 0;
2794  data->to_addr = to_addr;
2795  data->autinc_to
2796    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2797       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2798
2799  data->explicit_inc_to = 0;
2800  data->reverse
2801    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2802  if (data->reverse)
2803    data->offset = data->len;
2804
2805  /* If storing requires more than two move insns,
2806     copy addresses to registers (to make displacements shorter)
2807     and use post-increment if available.  */
2808  if (!data->autinc_to
2809      && move_by_pieces_ninsns (data->len, align) > 2)
2810    {
2811      /* Determine the main mode we'll be using.  */
2812      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2813	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2814	if (GET_MODE_SIZE (tmode) < max_size)
2815	  mode = tmode;
2816
2817      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2818	{
2819	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2820	  data->autinc_to = 1;
2821	  data->explicit_inc_to = -1;
2822	}
2823
2824      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2825	  && ! data->autinc_to)
2826	{
2827	  data->to_addr = copy_addr_to_reg (to_addr);
2828	  data->autinc_to = 1;
2829	  data->explicit_inc_to = 1;
2830	}
2831
2832      if ( !data->autinc_to && CONSTANT_P (to_addr))
2833	data->to_addr = copy_addr_to_reg (to_addr);
2834    }
2835
2836  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2837      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2838    align = MOVE_MAX * BITS_PER_UNIT;
2839
2840  /* First store what we can in the largest integer mode, then go to
2841     successively smaller modes.  */
2842
2843  while (max_size > 1)
2844    {
2845      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2846	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2847	if (GET_MODE_SIZE (tmode) < max_size)
2848	  mode = tmode;
2849
2850      if (mode == VOIDmode)
2851	break;
2852
2853      icode = mov_optab->handlers[(int) mode].insn_code;
2854      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2855	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2856
2857      max_size = GET_MODE_SIZE (mode);
2858    }
2859
2860  /* The code above should have handled everything.  */
2861  if (data->len != 0)
2862    abort ();
2863}
2864
2865/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2866   with move instructions for mode MODE.  GENFUN is the gen_... function
2867   to make a move insn for that mode.  DATA has all the other info.  */
2868
2869static void
2870store_by_pieces_2 (genfun, mode, data)
2871     rtx (*genfun) PARAMS ((rtx, ...));
2872     enum machine_mode mode;
2873     struct store_by_pieces *data;
2874{
2875  unsigned int size = GET_MODE_SIZE (mode);
2876  rtx to1, cst;
2877
2878  while (data->len >= size)
2879    {
2880      if (data->reverse)
2881	data->offset -= size;
2882
2883      if (data->autinc_to)
2884	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2885					 data->offset);
2886      else
2887	to1 = adjust_address (data->to, mode, data->offset);
2888
2889      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2890	emit_insn (gen_add2_insn (data->to_addr,
2891				  GEN_INT (-(HOST_WIDE_INT) size)));
2892
2893      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2894      emit_insn ((*genfun) (to1, cst));
2895
2896      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2897	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2898
2899      if (! data->reverse)
2900	data->offset += size;
2901
2902      data->len -= size;
2903    }
2904}
2905
2906/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2907   its length in bytes.  */
2908
2909rtx
2910clear_storage (object, size)
2911     rtx object;
2912     rtx size;
2913{
2914  rtx retval = 0;
2915  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2916			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2917
2918  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2919     just move a zero.  Otherwise, do this a piece at a time.  */
2920  if (GET_MODE (object) != BLKmode
2921      && GET_CODE (size) == CONST_INT
2922      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2923    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2924  else
2925    {
2926      object = protect_from_queue (object, 1);
2927      size = protect_from_queue (size, 0);
2928
2929      if (GET_CODE (size) == CONST_INT
2930	  && CLEAR_BY_PIECES_P (INTVAL (size), align))
2931	clear_by_pieces (object, INTVAL (size), align);
2932      else if (clear_storage_via_clrstr (object, size, align))
2933	;
2934      else
2935	retval = clear_storage_via_libcall (object, size);
2936    }
2937
2938  return retval;
2939}
2940
2941/* A subroutine of clear_storage.  Expand a clrstr pattern;
2942   return true if successful.  */
2943
2944static bool
2945clear_storage_via_clrstr (object, size, align)
2946     rtx object, size;
2947     unsigned int align;
2948{
2949  /* Try the most limited insn first, because there's no point
2950     including more than one in the machine description unless
2951     the more limited one has some advantage.  */
2952
2953  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2954  enum machine_mode mode;
2955
2956  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2957       mode = GET_MODE_WIDER_MODE (mode))
2958    {
2959      enum insn_code code = clrstr_optab[(int) mode];
2960      insn_operand_predicate_fn pred;
2961
2962      if (code != CODE_FOR_nothing
2963	  /* We don't need MODE to be narrower than
2964	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2965	     the mode mask, as it is returned by the macro, it will
2966	     definitely be less than the actual mode mask.  */
2967	  && ((GET_CODE (size) == CONST_INT
2968	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2969		   <= (GET_MODE_MASK (mode) >> 1)))
2970	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2971	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2972	      || (*pred) (object, BLKmode))
2973	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2974	      || (*pred) (opalign, VOIDmode)))
2975	{
2976	  rtx op1;
2977	  rtx last = get_last_insn ();
2978	  rtx pat;
2979
2980	  op1 = convert_to_mode (mode, size, 1);
2981	  pred = insn_data[(int) code].operand[1].predicate;
2982	  if (pred != 0 && ! (*pred) (op1, mode))
2983	    op1 = copy_to_mode_reg (mode, op1);
2984
2985	  pat = GEN_FCN ((int) code) (object, op1, opalign);
2986	  if (pat)
2987	    {
2988	      emit_insn (pat);
2989	      return true;
2990	    }
2991	  else
2992	    delete_insns_since (last);
2993	}
2994    }
2995
2996  return false;
2997}
2998
2999/* A subroutine of clear_storage.  Expand a call to memset or bzero.
3000   Return the return value of memset, 0 otherwise.  */
3001
3002static rtx
3003clear_storage_via_libcall (object, size)
3004     rtx object, size;
3005{
3006  tree call_expr, arg_list, fn, object_tree, size_tree;
3007  enum machine_mode size_mode;
3008  rtx retval;
3009
3010  /* OBJECT or SIZE may have been passed through protect_from_queue.
3011
3012     It is unsafe to save the value generated by protect_from_queue
3013     and reuse it later.  Consider what happens if emit_queue is
3014     called before the return value from protect_from_queue is used.
3015
3016     Expansion of the CALL_EXPR below will call emit_queue before
3017     we are finished emitting RTL for argument setup.  So if we are
3018     not careful we could get the wrong value for an argument.
3019
3020     To avoid this problem we go ahead and emit code to copy OBJECT
3021     and SIZE into new pseudos.  We can then place those new pseudos
3022     into an RTL_EXPR and use them later, even after a call to
3023     emit_queue.
3024
3025     Note this is not strictly needed for library calls since they
3026     do not call emit_queue before loading their arguments.  However,
3027     we may need to have library calls call emit_queue in the future
3028     since failing to do so could cause problems for targets which
3029     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
3030
3031  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3032
3033  if (TARGET_MEM_FUNCTIONS)
3034    size_mode = TYPE_MODE (sizetype);
3035  else
3036    size_mode = TYPE_MODE (unsigned_type_node);
3037  size = convert_to_mode (size_mode, size, 1);
3038  size = copy_to_mode_reg (size_mode, size);
3039
3040  /* It is incorrect to use the libcall calling conventions to call
3041     memset in this context.  This could be a user call to memset and
3042     the user may wish to examine the return value from memset.  For
3043     targets where libcalls and normal calls have different conventions
3044     for returning pointers, we could end up generating incorrect code.
3045
3046     For convenience, we generate the call to bzero this way as well.  */
3047
3048  object_tree = make_tree (ptr_type_node, object);
3049  if (TARGET_MEM_FUNCTIONS)
3050    size_tree = make_tree (sizetype, size);
3051  else
3052    size_tree = make_tree (unsigned_type_node, size);
3053
3054  fn = clear_storage_libcall_fn (true);
3055  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3056  if (TARGET_MEM_FUNCTIONS)
3057    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3058  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3059
3060  /* Now we have to build up the CALL_EXPR itself.  */
3061  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3062  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3063		     call_expr, arg_list, NULL_TREE);
3064  TREE_SIDE_EFFECTS (call_expr) = 1;
3065
3066  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3067
3068  /* If we are initializing a readonly value, show the above call
3069     clobbered it.  Otherwise, a load from it may erroneously be
3070     hoisted from a loop.  */
3071  if (RTX_UNCHANGING_P (object))
3072    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3073
3074  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3075}
3076
3077/* A subroutine of clear_storage_via_libcall.  Create the tree node
3078   for the function we use for block clears.  The first time FOR_CALL
3079   is true, we call assemble_external.  */
3080
3081static GTY(()) tree block_clear_fn;
3082
3083static tree
3084clear_storage_libcall_fn (for_call)
3085     int for_call;
3086{
3087  static bool emitted_extern;
3088  tree fn = block_clear_fn, args;
3089
3090  if (!fn)
3091    {
3092      if (TARGET_MEM_FUNCTIONS)
3093	{
3094	  fn = get_identifier ("memset");
3095	  args = build_function_type_list (ptr_type_node, ptr_type_node,
3096					   integer_type_node, sizetype,
3097					   NULL_TREE);
3098	}
3099      else
3100	{
3101	  fn = get_identifier ("bzero");
3102	  args = build_function_type_list (void_type_node, ptr_type_node,
3103					   unsigned_type_node, NULL_TREE);
3104	}
3105
3106      fn = build_decl (FUNCTION_DECL, fn, args);
3107      DECL_EXTERNAL (fn) = 1;
3108      TREE_PUBLIC (fn) = 1;
3109      DECL_ARTIFICIAL (fn) = 1;
3110      TREE_NOTHROW (fn) = 1;
3111
3112      block_clear_fn = fn;
3113    }
3114
3115  if (for_call && !emitted_extern)
3116    {
3117      emitted_extern = true;
3118      make_decl_rtl (fn, NULL);
3119      assemble_external (fn);
3120    }
3121
3122  return fn;
3123}
3124
3125/* Generate code to copy Y into X.
3126   Both Y and X must have the same mode, except that
3127   Y can be a constant with VOIDmode.
3128   This mode cannot be BLKmode; use emit_block_move for that.
3129
3130   Return the last instruction emitted.  */
3131
3132rtx
3133emit_move_insn (x, y)
3134     rtx x, y;
3135{
3136  enum machine_mode mode = GET_MODE (x);
3137  rtx y_cst = NULL_RTX;
3138  rtx last_insn;
3139
3140  x = protect_from_queue (x, 1);
3141  y = protect_from_queue (y, 0);
3142
3143  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3144    abort ();
3145
3146  /* Never force constant_p_rtx to memory.  */
3147  if (GET_CODE (y) == CONSTANT_P_RTX)
3148    ;
3149  else if (CONSTANT_P (y))
3150    {
3151      if (optimize
3152	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3153	  && (last_insn = compress_float_constant (x, y)))
3154	return last_insn;
3155
3156      if (!LEGITIMATE_CONSTANT_P (y))
3157	{
3158	  y_cst = y;
3159	  y = force_const_mem (mode, y);
3160
3161	  /* If the target's cannot_force_const_mem prevented the spill,
3162	     assume that the target's move expanders will also take care
3163	     of the non-legitimate constant.  */
3164	  if (!y)
3165	    y = y_cst;
3166	}
3167    }
3168
3169  /* If X or Y are memory references, verify that their addresses are valid
3170     for the machine.  */
3171  if (GET_CODE (x) == MEM
3172      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3173	   && ! push_operand (x, GET_MODE (x)))
3174	  || (flag_force_addr
3175	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3176    x = validize_mem (x);
3177
3178  if (GET_CODE (y) == MEM
3179      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3180	  || (flag_force_addr
3181	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3182    y = validize_mem (y);
3183
3184  if (mode == BLKmode)
3185    abort ();
3186
3187  last_insn = emit_move_insn_1 (x, y);
3188
3189  if (y_cst && GET_CODE (x) == REG)
3190    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3191
3192  return last_insn;
3193}
3194
3195/* Low level part of emit_move_insn.
3196   Called just like emit_move_insn, but assumes X and Y
3197   are basically valid.  */
3198
3199rtx
3200emit_move_insn_1 (x, y)
3201     rtx x, y;
3202{
3203  enum machine_mode mode = GET_MODE (x);
3204  enum machine_mode submode;
3205  enum mode_class class = GET_MODE_CLASS (mode);
3206
3207  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3208    abort ();
3209
3210  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3211    return
3212      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3213
3214  /* Expand complex moves by moving real part and imag part, if possible.  */
3215  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3216	   && BLKmode != (submode = GET_MODE_INNER (mode))
3217	   && (mov_optab->handlers[(int) submode].insn_code
3218	       != CODE_FOR_nothing))
3219    {
3220      /* Don't split destination if it is a stack push.  */
3221      int stack = push_operand (x, GET_MODE (x));
3222
3223#ifdef PUSH_ROUNDING
3224      /* In case we output to the stack, but the size is smaller machine can
3225	 push exactly, we need to use move instructions.  */
3226      if (stack
3227	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3228	      != GET_MODE_SIZE (submode)))
3229	{
3230	  rtx temp;
3231	  HOST_WIDE_INT offset1, offset2;
3232
3233	  /* Do not use anti_adjust_stack, since we don't want to update
3234	     stack_pointer_delta.  */
3235	  temp = expand_binop (Pmode,
3236#ifdef STACK_GROWS_DOWNWARD
3237			       sub_optab,
3238#else
3239			       add_optab,
3240#endif
3241			       stack_pointer_rtx,
3242			       GEN_INT
3243				 (PUSH_ROUNDING
3244				  (GET_MODE_SIZE (GET_MODE (x)))),
3245			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3246
3247	  if (temp != stack_pointer_rtx)
3248	    emit_move_insn (stack_pointer_rtx, temp);
3249
3250#ifdef STACK_GROWS_DOWNWARD
3251	  offset1 = 0;
3252	  offset2 = GET_MODE_SIZE (submode);
3253#else
3254	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3255	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3256		     + GET_MODE_SIZE (submode));
3257#endif
3258
3259	  emit_move_insn (change_address (x, submode,
3260					  gen_rtx_PLUS (Pmode,
3261						        stack_pointer_rtx,
3262							GEN_INT (offset1))),
3263			  gen_realpart (submode, y));
3264	  emit_move_insn (change_address (x, submode,
3265					  gen_rtx_PLUS (Pmode,
3266						        stack_pointer_rtx,
3267							GEN_INT (offset2))),
3268			  gen_imagpart (submode, y));
3269	}
3270      else
3271#endif
3272      /* If this is a stack, push the highpart first, so it
3273	 will be in the argument order.
3274
3275	 In that case, change_address is used only to convert
3276	 the mode, not to change the address.  */
3277      if (stack)
3278	{
3279	  /* Note that the real part always precedes the imag part in memory
3280	     regardless of machine's endianness.  */
3281#ifdef STACK_GROWS_DOWNWARD
3282	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3283		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3284		      gen_imagpart (submode, y)));
3285	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3286		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3287		      gen_realpart (submode, y)));
3288#else
3289	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3290		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3291		      gen_realpart (submode, y)));
3292	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3293		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3294		      gen_imagpart (submode, y)));
3295#endif
3296	}
3297      else
3298	{
3299	  rtx realpart_x, realpart_y;
3300	  rtx imagpart_x, imagpart_y;
3301
3302	  /* If this is a complex value with each part being smaller than a
3303	     word, the usual calling sequence will likely pack the pieces into
3304	     a single register.  Unfortunately, SUBREG of hard registers only
3305	     deals in terms of words, so we have a problem converting input
3306	     arguments to the CONCAT of two registers that is used elsewhere
3307	     for complex values.  If this is before reload, we can copy it into
3308	     memory and reload.  FIXME, we should see about using extract and
3309	     insert on integer registers, but complex short and complex char
3310	     variables should be rarely used.  */
3311	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3312	      && (reload_in_progress | reload_completed) == 0)
3313	    {
3314	      int packed_dest_p
3315		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3316	      int packed_src_p
3317		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3318
3319	      if (packed_dest_p || packed_src_p)
3320		{
3321		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3322					       ? MODE_FLOAT : MODE_INT);
3323
3324		  enum machine_mode reg_mode
3325		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3326
3327		  if (reg_mode != BLKmode)
3328		    {
3329		      rtx mem = assign_stack_temp (reg_mode,
3330						   GET_MODE_SIZE (mode), 0);
3331		      rtx cmem = adjust_address (mem, mode, 0);
3332
3333		      cfun->cannot_inline
3334			= N_("function using short complex types cannot be inline");
3335
3336		      if (packed_dest_p)
3337			{
3338			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3339
3340			  emit_move_insn_1 (cmem, y);
3341			  return emit_move_insn_1 (sreg, mem);
3342			}
3343		      else
3344			{
3345			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3346
3347			  emit_move_insn_1 (mem, sreg);
3348			  return emit_move_insn_1 (x, cmem);
3349			}
3350		    }
3351		}
3352	    }
3353
3354	  realpart_x = gen_realpart (submode, x);
3355	  realpart_y = gen_realpart (submode, y);
3356	  imagpart_x = gen_imagpart (submode, x);
3357	  imagpart_y = gen_imagpart (submode, y);
3358
3359	  /* Show the output dies here.  This is necessary for SUBREGs
3360	     of pseudos since we cannot track their lifetimes correctly;
3361	     hard regs shouldn't appear here except as return values.
3362	     We never want to emit such a clobber after reload.  */
3363	  if (x != y
3364	      && ! (reload_in_progress || reload_completed)
3365	      && (GET_CODE (realpart_x) == SUBREG
3366		  || GET_CODE (imagpart_x) == SUBREG))
3367	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3368
3369	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3370		     (realpart_x, realpart_y));
3371	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3372		     (imagpart_x, imagpart_y));
3373	}
3374
3375      return get_last_insn ();
3376    }
3377
3378  /* This will handle any multi-word or full-word mode that lacks a move_insn
3379     pattern.  However, you will get better code if you define such patterns,
3380     even if they must turn into multiple assembler instructions.  */
3381  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3382    {
3383      rtx last_insn = 0;
3384      rtx seq, inner;
3385      int need_clobber;
3386      int i;
3387
3388#ifdef PUSH_ROUNDING
3389
3390      /* If X is a push on the stack, do the push now and replace
3391	 X with a reference to the stack pointer.  */
3392      if (push_operand (x, GET_MODE (x)))
3393	{
3394	  rtx temp;
3395	  enum rtx_code code;
3396
3397	  /* Do not use anti_adjust_stack, since we don't want to update
3398	     stack_pointer_delta.  */
3399	  temp = expand_binop (Pmode,
3400#ifdef STACK_GROWS_DOWNWARD
3401			       sub_optab,
3402#else
3403			       add_optab,
3404#endif
3405			       stack_pointer_rtx,
3406			       GEN_INT
3407				 (PUSH_ROUNDING
3408				  (GET_MODE_SIZE (GET_MODE (x)))),
3409			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3410
3411	  if (temp != stack_pointer_rtx)
3412	    emit_move_insn (stack_pointer_rtx, temp);
3413
3414	  code = GET_CODE (XEXP (x, 0));
3415
3416	  /* Just hope that small offsets off SP are OK.  */
3417	  if (code == POST_INC)
3418	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3419				GEN_INT (-((HOST_WIDE_INT)
3420					   GET_MODE_SIZE (GET_MODE (x)))));
3421	  else if (code == POST_DEC)
3422	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3423				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3424	  else
3425	    temp = stack_pointer_rtx;
3426
3427	  x = change_address (x, VOIDmode, temp);
3428	}
3429#endif
3430
3431      /* If we are in reload, see if either operand is a MEM whose address
3432	 is scheduled for replacement.  */
3433      if (reload_in_progress && GET_CODE (x) == MEM
3434	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3435	x = replace_equiv_address_nv (x, inner);
3436      if (reload_in_progress && GET_CODE (y) == MEM
3437	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3438	y = replace_equiv_address_nv (y, inner);
3439
3440      start_sequence ();
3441
3442      need_clobber = 0;
3443      for (i = 0;
3444	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3445	   i++)
3446	{
3447	  rtx xpart = operand_subword (x, i, 1, mode);
3448	  rtx ypart = operand_subword (y, i, 1, mode);
3449
3450	  /* If we can't get a part of Y, put Y into memory if it is a
3451	     constant.  Otherwise, force it into a register.  If we still
3452	     can't get a part of Y, abort.  */
3453	  if (ypart == 0 && CONSTANT_P (y))
3454	    {
3455	      y = force_const_mem (mode, y);
3456	      ypart = operand_subword (y, i, 1, mode);
3457	    }
3458	  else if (ypart == 0)
3459	    ypart = operand_subword_force (y, i, mode);
3460
3461	  if (xpart == 0 || ypart == 0)
3462	    abort ();
3463
3464	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3465
3466	  last_insn = emit_move_insn (xpart, ypart);
3467	}
3468
3469      seq = get_insns ();
3470      end_sequence ();
3471
3472      /* Show the output dies here.  This is necessary for SUBREGs
3473	 of pseudos since we cannot track their lifetimes correctly;
3474	 hard regs shouldn't appear here except as return values.
3475	 We never want to emit such a clobber after reload.  */
3476      if (x != y
3477	  && ! (reload_in_progress || reload_completed)
3478	  && need_clobber != 0)
3479	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3480
3481      emit_insn (seq);
3482
3483      return last_insn;
3484    }
3485  else
3486    abort ();
3487}
3488
3489/* If Y is representable exactly in a narrower mode, and the target can
3490   perform the extension directly from constant or memory, then emit the
3491   move as an extension.  */
3492
3493static rtx
3494compress_float_constant (x, y)
3495     rtx x, y;
3496{
3497  enum machine_mode dstmode = GET_MODE (x);
3498  enum machine_mode orig_srcmode = GET_MODE (y);
3499  enum machine_mode srcmode;
3500  REAL_VALUE_TYPE r;
3501
3502  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3503
3504  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3505       srcmode != orig_srcmode;
3506       srcmode = GET_MODE_WIDER_MODE (srcmode))
3507    {
3508      enum insn_code ic;
3509      rtx trunc_y, last_insn;
3510
3511      /* Skip if the target can't extend this way.  */
3512      ic = can_extend_p (dstmode, srcmode, 0);
3513      if (ic == CODE_FOR_nothing)
3514	continue;
3515
3516      /* Skip if the narrowed value isn't exact.  */
3517      if (! exact_real_truncate (srcmode, &r))
3518	continue;
3519
3520      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3521
3522      if (LEGITIMATE_CONSTANT_P (trunc_y))
3523	{
3524	  /* Skip if the target needs extra instructions to perform
3525	     the extension.  */
3526	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3527	    continue;
3528	}
3529      else if (float_extend_from_mem[dstmode][srcmode])
3530	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3531      else
3532	continue;
3533
3534      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3535      last_insn = get_last_insn ();
3536
3537      if (GET_CODE (x) == REG)
3538	REG_NOTES (last_insn)
3539	  = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3540
3541      return last_insn;
3542    }
3543
3544  return NULL_RTX;
3545}
3546
3547/* Pushing data onto the stack.  */
3548
3549/* Push a block of length SIZE (perhaps variable)
3550   and return an rtx to address the beginning of the block.
3551   Note that it is not possible for the value returned to be a QUEUED.
3552   The value may be virtual_outgoing_args_rtx.
3553
3554   EXTRA is the number of bytes of padding to push in addition to SIZE.
3555   BELOW nonzero means this padding comes at low addresses;
3556   otherwise, the padding comes at high addresses.  */
3557
3558rtx
3559push_block (size, extra, below)
3560     rtx size;
3561     int extra, below;
3562{
3563  rtx temp;
3564
3565  size = convert_modes (Pmode, ptr_mode, size, 1);
3566  if (CONSTANT_P (size))
3567    anti_adjust_stack (plus_constant (size, extra));
3568  else if (GET_CODE (size) == REG && extra == 0)
3569    anti_adjust_stack (size);
3570  else
3571    {
3572      temp = copy_to_mode_reg (Pmode, size);
3573      if (extra != 0)
3574	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3575			     temp, 0, OPTAB_LIB_WIDEN);
3576      anti_adjust_stack (temp);
3577    }
3578
3579#ifndef STACK_GROWS_DOWNWARD
3580  if (0)
3581#else
3582  if (1)
3583#endif
3584    {
3585      temp = virtual_outgoing_args_rtx;
3586      if (extra != 0 && below)
3587	temp = plus_constant (temp, extra);
3588    }
3589  else
3590    {
3591      if (GET_CODE (size) == CONST_INT)
3592	temp = plus_constant (virtual_outgoing_args_rtx,
3593			      -INTVAL (size) - (below ? 0 : extra));
3594      else if (extra != 0 && !below)
3595	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3596			     negate_rtx (Pmode, plus_constant (size, extra)));
3597      else
3598	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3599			     negate_rtx (Pmode, size));
3600    }
3601
3602  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3603}
3604
3605#ifdef PUSH_ROUNDING
3606
3607/* Emit single push insn.  */
3608
3609static void
3610emit_single_push_insn (mode, x, type)
3611     rtx x;
3612     enum machine_mode mode;
3613     tree type;
3614{
3615  rtx dest_addr;
3616  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3617  rtx dest;
3618  enum insn_code icode;
3619  insn_operand_predicate_fn pred;
3620
3621  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3622  /* If there is push pattern, use it.  Otherwise try old way of throwing
3623     MEM representing push operation to move expander.  */
3624  icode = push_optab->handlers[(int) mode].insn_code;
3625  if (icode != CODE_FOR_nothing)
3626    {
3627      if (((pred = insn_data[(int) icode].operand[0].predicate)
3628	   && !((*pred) (x, mode))))
3629	x = force_reg (mode, x);
3630      emit_insn (GEN_FCN (icode) (x));
3631      return;
3632    }
3633  if (GET_MODE_SIZE (mode) == rounded_size)
3634    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3635  else
3636    {
3637#ifdef STACK_GROWS_DOWNWARD
3638      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3639				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3640#else
3641      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3642				GEN_INT (rounded_size));
3643#endif
3644      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3645    }
3646
3647  dest = gen_rtx_MEM (mode, dest_addr);
3648
3649  if (type != 0)
3650    {
3651      set_mem_attributes (dest, type, 1);
3652
3653      if (flag_optimize_sibling_calls)
3654	/* Function incoming arguments may overlap with sibling call
3655	   outgoing arguments and we cannot allow reordering of reads
3656	   from function arguments with stores to outgoing arguments
3657	   of sibling calls.  */
3658	set_mem_alias_set (dest, 0);
3659    }
3660  emit_move_insn (dest, x);
3661}
3662#endif
3663
3664/* Generate code to push X onto the stack, assuming it has mode MODE and
3665   type TYPE.
3666   MODE is redundant except when X is a CONST_INT (since they don't
3667   carry mode info).
3668   SIZE is an rtx for the size of data to be copied (in bytes),
3669   needed only if X is BLKmode.
3670
3671   ALIGN (in bits) is maximum alignment we can assume.
3672
3673   If PARTIAL and REG are both nonzero, then copy that many of the first
3674   words of X into registers starting with REG, and push the rest of X.
3675   The amount of space pushed is decreased by PARTIAL words,
3676   rounded *down* to a multiple of PARM_BOUNDARY.
3677   REG must be a hard register in this case.
3678   If REG is zero but PARTIAL is not, take any all others actions for an
3679   argument partially in registers, but do not actually load any
3680   registers.
3681
3682   EXTRA is the amount in bytes of extra space to leave next to this arg.
3683   This is ignored if an argument block has already been allocated.
3684
3685   On a machine that lacks real push insns, ARGS_ADDR is the address of
3686   the bottom of the argument block for this call.  We use indexing off there
3687   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
3688   argument block has not been preallocated.
3689
3690   ARGS_SO_FAR is the size of args previously pushed for this call.
3691
3692   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3693   for arguments passed in registers.  If nonzero, it will be the number
3694   of bytes required.  */
3695
3696void
3697emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3698		args_addr, args_so_far, reg_parm_stack_space,
3699		alignment_pad)
3700     rtx x;
3701     enum machine_mode mode;
3702     tree type;
3703     rtx size;
3704     unsigned int align;
3705     int partial;
3706     rtx reg;
3707     int extra;
3708     rtx args_addr;
3709     rtx args_so_far;
3710     int reg_parm_stack_space;
3711     rtx alignment_pad;
3712{
3713  rtx xinner;
3714  enum direction stack_direction
3715#ifdef STACK_GROWS_DOWNWARD
3716    = downward;
3717#else
3718    = upward;
3719#endif
3720
3721  /* Decide where to pad the argument: `downward' for below,
3722     `upward' for above, or `none' for don't pad it.
3723     Default is below for small data on big-endian machines; else above.  */
3724  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3725
3726  /* Invert direction if stack is post-decrement.
3727     FIXME: why?  */
3728  if (STACK_PUSH_CODE == POST_DEC)
3729    if (where_pad != none)
3730      where_pad = (where_pad == downward ? upward : downward);
3731
3732  xinner = x = protect_from_queue (x, 0);
3733
3734  if (mode == BLKmode)
3735    {
3736      /* Copy a block into the stack, entirely or partially.  */
3737
3738      rtx temp;
3739      int used = partial * UNITS_PER_WORD;
3740      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3741      int skip;
3742
3743      if (size == 0)
3744	abort ();
3745
3746      used -= offset;
3747
3748      /* USED is now the # of bytes we need not copy to the stack
3749	 because registers will take care of them.  */
3750
3751      if (partial != 0)
3752	xinner = adjust_address (xinner, BLKmode, used);
3753
3754      /* If the partial register-part of the arg counts in its stack size,
3755	 skip the part of stack space corresponding to the registers.
3756	 Otherwise, start copying to the beginning of the stack space,
3757	 by setting SKIP to 0.  */
3758      skip = (reg_parm_stack_space == 0) ? 0 : used;
3759
3760#ifdef PUSH_ROUNDING
3761      /* Do it with several push insns if that doesn't take lots of insns
3762	 and if there is no difficulty with push insns that skip bytes
3763	 on the stack for alignment purposes.  */
3764      if (args_addr == 0
3765	  && PUSH_ARGS
3766	  && GET_CODE (size) == CONST_INT
3767	  && skip == 0
3768	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3769	  /* Here we avoid the case of a structure whose weak alignment
3770	     forces many pushes of a small amount of data,
3771	     and such small pushes do rounding that causes trouble.  */
3772	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3773	      || align >= BIGGEST_ALIGNMENT
3774	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3775		  == (align / BITS_PER_UNIT)))
3776	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3777	{
3778	  /* Push padding now if padding above and stack grows down,
3779	     or if padding below and stack grows up.
3780	     But if space already allocated, this has already been done.  */
3781	  if (extra && args_addr == 0
3782	      && where_pad != none && where_pad != stack_direction)
3783	    anti_adjust_stack (GEN_INT (extra));
3784
3785	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3786	}
3787      else
3788#endif /* PUSH_ROUNDING  */
3789	{
3790	  rtx target;
3791
3792	  /* Otherwise make space on the stack and copy the data
3793	     to the address of that space.  */
3794
3795	  /* Deduct words put into registers from the size we must copy.  */
3796	  if (partial != 0)
3797	    {
3798	      if (GET_CODE (size) == CONST_INT)
3799		size = GEN_INT (INTVAL (size) - used);
3800	      else
3801		size = expand_binop (GET_MODE (size), sub_optab, size,
3802				     GEN_INT (used), NULL_RTX, 0,
3803				     OPTAB_LIB_WIDEN);
3804	    }
3805
3806	  /* Get the address of the stack space.
3807	     In this case, we do not deal with EXTRA separately.
3808	     A single stack adjust will do.  */
3809	  if (! args_addr)
3810	    {
3811	      temp = push_block (size, extra, where_pad == downward);
3812	      extra = 0;
3813	    }
3814	  else if (GET_CODE (args_so_far) == CONST_INT)
3815	    temp = memory_address (BLKmode,
3816				   plus_constant (args_addr,
3817						  skip + INTVAL (args_so_far)));
3818	  else
3819	    temp = memory_address (BLKmode,
3820				   plus_constant (gen_rtx_PLUS (Pmode,
3821								args_addr,
3822								args_so_far),
3823						  skip));
3824
3825	  if (!ACCUMULATE_OUTGOING_ARGS)
3826	    {
3827	      /* If the source is referenced relative to the stack pointer,
3828		 copy it to another register to stabilize it.  We do not need
3829		 to do this if we know that we won't be changing sp.  */
3830
3831	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3832		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3833		temp = copy_to_reg (temp);
3834	    }
3835
3836	  target = gen_rtx_MEM (BLKmode, temp);
3837
3838	  if (type != 0)
3839	    {
3840	      set_mem_attributes (target, type, 1);
3841	      /* Function incoming arguments may overlap with sibling call
3842		 outgoing arguments and we cannot allow reordering of reads
3843		 from function arguments with stores to outgoing arguments
3844		 of sibling calls.  */
3845	      set_mem_alias_set (target, 0);
3846	    }
3847
3848	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3849	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3850	  set_mem_align (target, align);
3851
3852	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3853	}
3854    }
3855  else if (partial > 0)
3856    {
3857      /* Scalar partly in registers.  */
3858
3859      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3860      int i;
3861      int not_stack;
3862      /* # words of start of argument
3863	 that we must make space for but need not store.  */
3864      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3865      int args_offset = INTVAL (args_so_far);
3866      int skip;
3867
3868      /* Push padding now if padding above and stack grows down,
3869	 or if padding below and stack grows up.
3870	 But if space already allocated, this has already been done.  */
3871      if (extra && args_addr == 0
3872	  && where_pad != none && where_pad != stack_direction)
3873	anti_adjust_stack (GEN_INT (extra));
3874
3875      /* If we make space by pushing it, we might as well push
3876	 the real data.  Otherwise, we can leave OFFSET nonzero
3877	 and leave the space uninitialized.  */
3878      if (args_addr == 0)
3879	offset = 0;
3880
3881      /* Now NOT_STACK gets the number of words that we don't need to
3882	 allocate on the stack.  */
3883      not_stack = partial - offset;
3884
3885      /* If the partial register-part of the arg counts in its stack size,
3886	 skip the part of stack space corresponding to the registers.
3887	 Otherwise, start copying to the beginning of the stack space,
3888	 by setting SKIP to 0.  */
3889      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3890
3891      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3892	x = validize_mem (force_const_mem (mode, x));
3893
3894      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3895	 SUBREGs of such registers are not allowed.  */
3896      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3897	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3898	x = copy_to_reg (x);
3899
3900      /* Loop over all the words allocated on the stack for this arg.  */
3901      /* We can do it by words, because any scalar bigger than a word
3902	 has a size a multiple of a word.  */
3903#ifndef PUSH_ARGS_REVERSED
3904      for (i = not_stack; i < size; i++)
3905#else
3906      for (i = size - 1; i >= not_stack; i--)
3907#endif
3908	if (i >= not_stack + offset)
3909	  emit_push_insn (operand_subword_force (x, i, mode),
3910			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3911			  0, args_addr,
3912			  GEN_INT (args_offset + ((i - not_stack + skip)
3913						  * UNITS_PER_WORD)),
3914			  reg_parm_stack_space, alignment_pad);
3915    }
3916  else
3917    {
3918      rtx addr;
3919      rtx target = NULL_RTX;
3920      rtx dest;
3921
3922      /* Push padding now if padding above and stack grows down,
3923	 or if padding below and stack grows up.
3924	 But if space already allocated, this has already been done.  */
3925      if (extra && args_addr == 0
3926	  && where_pad != none && where_pad != stack_direction)
3927	anti_adjust_stack (GEN_INT (extra));
3928
3929#ifdef PUSH_ROUNDING
3930      if (args_addr == 0 && PUSH_ARGS)
3931	emit_single_push_insn (mode, x, type);
3932      else
3933#endif
3934	{
3935	  if (GET_CODE (args_so_far) == CONST_INT)
3936	    addr
3937	      = memory_address (mode,
3938				plus_constant (args_addr,
3939					       INTVAL (args_so_far)));
3940	  else
3941	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3942						       args_so_far));
3943	  target = addr;
3944	  dest = gen_rtx_MEM (mode, addr);
3945	  if (type != 0)
3946	    {
3947	      set_mem_attributes (dest, type, 1);
3948	      /* Function incoming arguments may overlap with sibling call
3949		 outgoing arguments and we cannot allow reordering of reads
3950		 from function arguments with stores to outgoing arguments
3951		 of sibling calls.  */
3952	      set_mem_alias_set (dest, 0);
3953	    }
3954
3955	  emit_move_insn (dest, x);
3956	}
3957    }
3958
3959  /* If part should go in registers, copy that part
3960     into the appropriate registers.  Do this now, at the end,
3961     since mem-to-mem copies above may do function calls.  */
3962  if (partial > 0 && reg != 0)
3963    {
3964      /* Handle calls that pass values in multiple non-contiguous locations.
3965	 The Irix 6 ABI has examples of this.  */
3966      if (GET_CODE (reg) == PARALLEL)
3967	emit_group_load (reg, x, -1);  /* ??? size? */
3968      else
3969	move_block_to_reg (REGNO (reg), x, partial, mode);
3970    }
3971
3972  if (extra && args_addr == 0 && where_pad == stack_direction)
3973    anti_adjust_stack (GEN_INT (extra));
3974
3975  if (alignment_pad && args_addr == 0)
3976    anti_adjust_stack (alignment_pad);
3977}
3978
3979/* Return X if X can be used as a subtarget in a sequence of arithmetic
3980   operations.  */
3981
3982static rtx
3983get_subtarget (x)
3984     rtx x;
3985{
3986  return ((x == 0
3987	   /* Only registers can be subtargets.  */
3988	   || GET_CODE (x) != REG
3989	   /* If the register is readonly, it can't be set more than once.  */
3990	   || RTX_UNCHANGING_P (x)
3991	   /* Don't use hard regs to avoid extending their life.  */
3992	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3993	   /* Avoid subtargets inside loops,
3994	      since they hide some invariant expressions.  */
3995	   || preserve_subexpressions_p ())
3996	  ? 0 : x);
3997}
3998
3999/* Expand an assignment that stores the value of FROM into TO.
4000   If WANT_VALUE is nonzero, return an rtx for the value of TO.
4001   (This may contain a QUEUED rtx;
4002   if the value is constant, this rtx is a constant.)
4003   Otherwise, the returned value is NULL_RTX.
4004
4005   SUGGEST_REG is no longer actually used.
4006   It used to mean, copy the value through a register
4007   and return that register, if that is possible.
4008   We now use WANT_VALUE to decide whether to do this.  */
4009
4010rtx
4011expand_assignment (to, from, want_value, suggest_reg)
4012     tree to, from;
4013     int want_value;
4014     int suggest_reg ATTRIBUTE_UNUSED;
4015{
4016  rtx to_rtx = 0;
4017  rtx result;
4018
4019  /* Don't crash if the lhs of the assignment was erroneous.  */
4020
4021  if (TREE_CODE (to) == ERROR_MARK)
4022    {
4023      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4024      return want_value ? result : NULL_RTX;
4025    }
4026
4027  /* Assignment of a structure component needs special treatment
4028     if the structure component's rtx is not simply a MEM.
4029     Assignment of an array element at a constant index, and assignment of
4030     an array element in an unaligned packed structure field, has the same
4031     problem.  */
4032
4033  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4034      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4035      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4036    {
4037      enum machine_mode mode1;
4038      HOST_WIDE_INT bitsize, bitpos;
4039      rtx orig_to_rtx;
4040      tree offset;
4041      int unsignedp;
4042      int volatilep = 0;
4043      tree tem;
4044
4045      push_temp_slots ();
4046      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4047				 &unsignedp, &volatilep);
4048
4049      /* If we are going to use store_bit_field and extract_bit_field,
4050	 make sure to_rtx will be safe for multiple use.  */
4051
4052      if (mode1 == VOIDmode && want_value)
4053	tem = stabilize_reference (tem);
4054
4055      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4056
4057      if (offset != 0)
4058	{
4059	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4060
4061	  if (GET_CODE (to_rtx) != MEM)
4062	    abort ();
4063
4064#ifdef POINTERS_EXTEND_UNSIGNED
4065	  if (GET_MODE (offset_rtx) != Pmode)
4066	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4067#else
4068	  if (GET_MODE (offset_rtx) != ptr_mode)
4069	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4070#endif
4071
4072	  /* A constant address in TO_RTX can have VOIDmode, we must not try
4073	     to call force_reg for that case.  Avoid that case.  */
4074	  if (GET_CODE (to_rtx) == MEM
4075	      && GET_MODE (to_rtx) == BLKmode
4076	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4077	      && bitsize > 0
4078	      && (bitpos % bitsize) == 0
4079	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4080	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4081	    {
4082	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4083	      bitpos = 0;
4084	    }
4085
4086	  to_rtx = offset_address (to_rtx, offset_rtx,
4087				   highest_pow2_factor_for_type (TREE_TYPE (to),
4088								 offset));
4089	}
4090
4091      if (GET_CODE (to_rtx) == MEM)
4092	{
4093	  /* If the field is at offset zero, we could have been given the
4094	     DECL_RTX of the parent struct.  Don't munge it.  */
4095	  to_rtx = shallow_copy_rtx (to_rtx);
4096
4097	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4098	}
4099
4100      /* Deal with volatile and readonly fields.  The former is only done
4101	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4102      if (volatilep && GET_CODE (to_rtx) == MEM)
4103	{
4104	  if (to_rtx == orig_to_rtx)
4105	    to_rtx = copy_rtx (to_rtx);
4106	  MEM_VOLATILE_P (to_rtx) = 1;
4107	}
4108
4109      if (TREE_CODE (to) == COMPONENT_REF
4110	  && TREE_READONLY (TREE_OPERAND (to, 1)))
4111	{
4112	  if (to_rtx == orig_to_rtx)
4113	    to_rtx = copy_rtx (to_rtx);
4114	  RTX_UNCHANGING_P (to_rtx) = 1;
4115	}
4116
4117      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4118	{
4119	  if (to_rtx == orig_to_rtx)
4120	    to_rtx = copy_rtx (to_rtx);
4121	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4122	}
4123
4124      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4125			    (want_value
4126			     /* Spurious cast for HPUX compiler.  */
4127			     ? ((enum machine_mode)
4128				TYPE_MODE (TREE_TYPE (to)))
4129			     : VOIDmode),
4130			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
4131
4132      preserve_temp_slots (result);
4133      free_temp_slots ();
4134      pop_temp_slots ();
4135
4136      /* If the value is meaningful, convert RESULT to the proper mode.
4137	 Otherwise, return nothing.  */
4138      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4139					  TYPE_MODE (TREE_TYPE (from)),
4140					  result,
4141					  TREE_UNSIGNED (TREE_TYPE (to)))
4142	      : NULL_RTX);
4143    }
4144
4145  /* If the rhs is a function call and its value is not an aggregate,
4146     call the function before we start to compute the lhs.
4147     This is needed for correct code for cases such as
4148     val = setjmp (buf) on machines where reference to val
4149     requires loading up part of an address in a separate insn.
4150
4151     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4152     since it might be a promoted variable where the zero- or sign- extension
4153     needs to be done.  Handling this in the normal way is safe because no
4154     computation is done before the call.  */
4155  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4156      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4157      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4158	    && GET_CODE (DECL_RTL (to)) == REG))
4159    {
4160      rtx value;
4161
4162      push_temp_slots ();
4163      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4164      if (to_rtx == 0)
4165	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4166
4167      /* Handle calls that return values in multiple non-contiguous locations.
4168	 The Irix 6 ABI has examples of this.  */
4169      if (GET_CODE (to_rtx) == PARALLEL)
4170	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4171      else if (GET_MODE (to_rtx) == BLKmode)
4172	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4173      else
4174	{
4175#ifdef POINTERS_EXTEND_UNSIGNED
4176	  if (POINTER_TYPE_P (TREE_TYPE (to))
4177	      && GET_MODE (to_rtx) != GET_MODE (value))
4178	    value = convert_memory_address (GET_MODE (to_rtx), value);
4179#endif
4180	  emit_move_insn (to_rtx, value);
4181	}
4182      preserve_temp_slots (to_rtx);
4183      free_temp_slots ();
4184      pop_temp_slots ();
4185      return want_value ? to_rtx : NULL_RTX;
4186    }
4187
4188  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4189     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4190
4191  if (to_rtx == 0)
4192    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4193
4194  /* Don't move directly into a return register.  */
4195  if (TREE_CODE (to) == RESULT_DECL
4196      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4197    {
4198      rtx temp;
4199
4200      push_temp_slots ();
4201      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4202
4203      if (GET_CODE (to_rtx) == PARALLEL)
4204	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4205      else
4206	emit_move_insn (to_rtx, temp);
4207
4208      preserve_temp_slots (to_rtx);
4209      free_temp_slots ();
4210      pop_temp_slots ();
4211      return want_value ? to_rtx : NULL_RTX;
4212    }
4213
4214  /* In case we are returning the contents of an object which overlaps
4215     the place the value is being stored, use a safe function when copying
4216     a value through a pointer into a structure value return block.  */
4217  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4218      && current_function_returns_struct
4219      && !current_function_returns_pcc_struct)
4220    {
4221      rtx from_rtx, size;
4222
4223      push_temp_slots ();
4224      size = expr_size (from);
4225      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4226
4227      if (TARGET_MEM_FUNCTIONS)
4228	emit_library_call (memmove_libfunc, LCT_NORMAL,
4229			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4230			   XEXP (from_rtx, 0), Pmode,
4231			   convert_to_mode (TYPE_MODE (sizetype),
4232					    size, TREE_UNSIGNED (sizetype)),
4233			   TYPE_MODE (sizetype));
4234      else
4235        emit_library_call (bcopy_libfunc, LCT_NORMAL,
4236			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4237			   XEXP (to_rtx, 0), Pmode,
4238			   convert_to_mode (TYPE_MODE (integer_type_node),
4239					    size,
4240					    TREE_UNSIGNED (integer_type_node)),
4241			   TYPE_MODE (integer_type_node));
4242
4243      preserve_temp_slots (to_rtx);
4244      free_temp_slots ();
4245      pop_temp_slots ();
4246      return want_value ? to_rtx : NULL_RTX;
4247    }
4248
4249  /* Compute FROM and store the value in the rtx we got.  */
4250
4251  push_temp_slots ();
4252  result = store_expr (from, to_rtx, want_value);
4253  preserve_temp_slots (result);
4254  free_temp_slots ();
4255  pop_temp_slots ();
4256  return want_value ? result : NULL_RTX;
4257}
4258
4259/* Generate code for computing expression EXP,
4260   and storing the value into TARGET.
4261   TARGET may contain a QUEUED rtx.
4262
4263   If WANT_VALUE & 1 is nonzero, return a copy of the value
4264   not in TARGET, so that we can be sure to use the proper
4265   value in a containing expression even if TARGET has something
4266   else stored in it.  If possible, we copy the value through a pseudo
4267   and return that pseudo.  Or, if the value is constant, we try to
4268   return the constant.  In some cases, we return a pseudo
4269   copied *from* TARGET.
4270
4271   If the mode is BLKmode then we may return TARGET itself.
4272   It turns out that in BLKmode it doesn't cause a problem.
4273   because C has no operators that could combine two different
4274   assignments into the same BLKmode object with different values
4275   with no sequence point.  Will other languages need this to
4276   be more thorough?
4277
4278   If WANT_VALUE & 1 is 0, we return NULL, to make sure
4279   to catch quickly any cases where the caller uses the value
4280   and fails to set WANT_VALUE.
4281
4282   If WANT_VALUE & 2 is set, this is a store into a call param on the
4283   stack, and block moves may need to be treated specially.  */
4284
4285rtx
4286store_expr (exp, target, want_value)
4287     tree exp;
4288     rtx target;
4289     int want_value;
4290{
4291  rtx temp;
4292  int dont_return_target = 0;
4293  int dont_store_target = 0;
4294
4295  if (VOID_TYPE_P (TREE_TYPE (exp)))
4296    {
4297      /* C++ can generate ?: expressions with a throw expression in one
4298	 branch and an rvalue in the other. Here, we resolve attempts to
4299	 store the throw expression's nonexistant result. */
4300      if (want_value)
4301	abort ();
4302      expand_expr (exp, const0_rtx, VOIDmode, 0);
4303      return NULL_RTX;
4304    }
4305  if (TREE_CODE (exp) == COMPOUND_EXPR)
4306    {
4307      /* Perform first part of compound expression, then assign from second
4308	 part.  */
4309      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4310		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4311      emit_queue ();
4312      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4313    }
4314  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4315    {
4316      /* For conditional expression, get safe form of the target.  Then
4317	 test the condition, doing the appropriate assignment on either
4318	 side.  This avoids the creation of unnecessary temporaries.
4319	 For non-BLKmode, it is more efficient not to do this.  */
4320
4321      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4322
4323      emit_queue ();
4324      target = protect_from_queue (target, 1);
4325
4326      do_pending_stack_adjust ();
4327      NO_DEFER_POP;
4328      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4329      start_cleanup_deferral ();
4330      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4331      end_cleanup_deferral ();
4332      emit_queue ();
4333      emit_jump_insn (gen_jump (lab2));
4334      emit_barrier ();
4335      emit_label (lab1);
4336      start_cleanup_deferral ();
4337      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4338      end_cleanup_deferral ();
4339      emit_queue ();
4340      emit_label (lab2);
4341      OK_DEFER_POP;
4342
4343      return want_value & 1 ? target : NULL_RTX;
4344    }
4345  else if (queued_subexp_p (target))
4346    /* If target contains a postincrement, let's not risk
4347       using it as the place to generate the rhs.  */
4348    {
4349      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4350	{
4351	  /* Expand EXP into a new pseudo.  */
4352	  temp = gen_reg_rtx (GET_MODE (target));
4353	  temp = expand_expr (exp, temp, GET_MODE (target),
4354			      (want_value & 2
4355			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4356	}
4357      else
4358	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4359			    (want_value & 2
4360			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4361
4362      /* If target is volatile, ANSI requires accessing the value
4363	 *from* the target, if it is accessed.  So make that happen.
4364	 In no case return the target itself.  */
4365      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4366	dont_return_target = 1;
4367    }
4368  else if ((want_value & 1) != 0
4369	   && GET_CODE (target) == MEM
4370	   && ! MEM_VOLATILE_P (target)
4371	   && GET_MODE (target) != BLKmode)
4372    /* If target is in memory and caller wants value in a register instead,
4373       arrange that.  Pass TARGET as target for expand_expr so that,
4374       if EXP is another assignment, WANT_VALUE will be nonzero for it.
4375       We know expand_expr will not use the target in that case.
4376       Don't do this if TARGET is volatile because we are supposed
4377       to write it and then read it.  */
4378    {
4379      temp = expand_expr (exp, target, GET_MODE (target),
4380			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4381      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4382	{
4383	  /* If TEMP is already in the desired TARGET, only copy it from
4384	     memory and don't store it there again.  */
4385	  if (temp == target
4386	      || (rtx_equal_p (temp, target)
4387		  && ! side_effects_p (temp) && ! side_effects_p (target)))
4388	    dont_store_target = 1;
4389	  temp = copy_to_reg (temp);
4390	}
4391      dont_return_target = 1;
4392    }
4393  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4394    /* If this is a scalar in a register that is stored in a wider mode
4395       than the declared mode, compute the result into its declared mode
4396       and then convert to the wider mode.  Our value is the computed
4397       expression.  */
4398    {
4399      rtx inner_target = 0;
4400
4401      /* If we don't want a value, we can do the conversion inside EXP,
4402	 which will often result in some optimizations.  Do the conversion
4403	 in two steps: first change the signedness, if needed, then
4404	 the extend.  But don't do this if the type of EXP is a subtype
4405	 of something else since then the conversion might involve
4406	 more than just converting modes.  */
4407      if ((want_value & 1) == 0
4408	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4409	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4410	{
4411	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4412	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4413	    exp = convert
4414	      ((*lang_hooks.types.signed_or_unsigned_type)
4415	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4416
4417	  exp = convert ((*lang_hooks.types.type_for_mode)
4418			 (GET_MODE (SUBREG_REG (target)),
4419			  SUBREG_PROMOTED_UNSIGNED_P (target)),
4420			 exp);
4421
4422	  inner_target = SUBREG_REG (target);
4423	}
4424
4425      temp = expand_expr (exp, inner_target, VOIDmode,
4426			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4427
4428      /* If TEMP is a volatile MEM and we want a result value, make
4429	 the access now so it gets done only once.  Likewise if
4430	 it contains TARGET.  */
4431      if (GET_CODE (temp) == MEM && (want_value & 1) != 0
4432	  && (MEM_VOLATILE_P (temp)
4433	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4434	temp = copy_to_reg (temp);
4435
4436      /* If TEMP is a VOIDmode constant, use convert_modes to make
4437	 sure that we properly convert it.  */
4438      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4439	{
4440	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4441				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4442	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4443			        GET_MODE (target), temp,
4444			        SUBREG_PROMOTED_UNSIGNED_P (target));
4445	}
4446
4447      convert_move (SUBREG_REG (target), temp,
4448		    SUBREG_PROMOTED_UNSIGNED_P (target));
4449
4450      /* If we promoted a constant, change the mode back down to match
4451	 target.  Otherwise, the caller might get confused by a result whose
4452	 mode is larger than expected.  */
4453
4454      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4455	{
4456	  if (GET_MODE (temp) != VOIDmode)
4457	    {
4458	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4459	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4460	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
4461		SUBREG_PROMOTED_UNSIGNED_P (target));
4462	    }
4463	  else
4464	    temp = convert_modes (GET_MODE (target),
4465				  GET_MODE (SUBREG_REG (target)),
4466				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4467	}
4468
4469      return want_value & 1 ? temp : NULL_RTX;
4470    }
4471  else
4472    {
4473      temp = expand_expr (exp, target, GET_MODE (target),
4474			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4475      /* Return TARGET if it's a specified hardware register.
4476	 If TARGET is a volatile mem ref, either return TARGET
4477	 or return a reg copied *from* TARGET; ANSI requires this.
4478
4479	 Otherwise, if TEMP is not TARGET, return TEMP
4480	 if it is constant (for efficiency),
4481	 or if we really want the correct value.  */
4482      if (!(target && GET_CODE (target) == REG
4483	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4484	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4485	  && ! rtx_equal_p (temp, target)
4486	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
4487	dont_return_target = 1;
4488    }
4489
4490  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4491     the same as that of TARGET, adjust the constant.  This is needed, for
4492     example, in case it is a CONST_DOUBLE and we want only a word-sized
4493     value.  */
4494  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4495      && TREE_CODE (exp) != ERROR_MARK
4496      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4497    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4498			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4499
4500  /* If value was not generated in the target, store it there.
4501     Convert the value to TARGET's type first if necessary.
4502     If TEMP and TARGET compare equal according to rtx_equal_p, but
4503     one or both of them are volatile memory refs, we have to distinguish
4504     two cases:
4505     - expand_expr has used TARGET.  In this case, we must not generate
4506       another copy.  This can be detected by TARGET being equal according
4507       to == .
4508     - expand_expr has not used TARGET - that means that the source just
4509       happens to have the same RTX form.  Since temp will have been created
4510       by expand_expr, it will compare unequal according to == .
4511       We must generate a copy in this case, to reach the correct number
4512       of volatile memory references.  */
4513
4514  if ((! rtx_equal_p (temp, target)
4515       || (temp != target && (side_effects_p (temp)
4516			      || side_effects_p (target))))
4517      && TREE_CODE (exp) != ERROR_MARK
4518      && ! dont_store_target
4519	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4520	    but TARGET is not valid memory reference, TEMP will differ
4521	    from TARGET although it is really the same location.  */
4522      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4523	  || target != DECL_RTL_IF_SET (exp))
4524      /* If there's nothing to copy, don't bother.  Don't call expr_size
4525	 unless necessary, because some front-ends (C++) expr_size-hook
4526	 aborts on objects that are not supposed to be bit-copied or
4527	 bit-initialized.  */
4528      && expr_size (exp) != const0_rtx)
4529    {
4530      target = protect_from_queue (target, 1);
4531      if (GET_MODE (temp) != GET_MODE (target)
4532	  && GET_MODE (temp) != VOIDmode)
4533	{
4534	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4535	  if (dont_return_target)
4536	    {
4537	      /* In this case, we will return TEMP,
4538		 so make sure it has the proper mode.
4539		 But don't forget to store the value into TARGET.  */
4540	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4541	      emit_move_insn (target, temp);
4542	    }
4543	  else
4544	    convert_move (target, temp, unsignedp);
4545	}
4546
4547      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4548	{
4549	  /* Handle copying a string constant into an array.  The string
4550	     constant may be shorter than the array.  So copy just the string's
4551	     actual length, and clear the rest.  First get the size of the data
4552	     type of the string, which is actually the size of the target.  */
4553	  rtx size = expr_size (exp);
4554
4555	  if (GET_CODE (size) == CONST_INT
4556	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4557	    emit_block_move (target, temp, size,
4558			     (want_value & 2
4559			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4560	  else
4561	    {
4562	      /* Compute the size of the data to copy from the string.  */
4563	      tree copy_size
4564		= size_binop (MIN_EXPR,
4565			      make_tree (sizetype, size),
4566			      size_int (TREE_STRING_LENGTH (exp)));
4567	      rtx copy_size_rtx
4568		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4569			       (want_value & 2
4570				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4571	      rtx label = 0;
4572
4573	      /* Copy that much.  */
4574	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4575					       TREE_UNSIGNED (sizetype));
4576	      emit_block_move (target, temp, copy_size_rtx,
4577			       (want_value & 2
4578				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4579
4580	      /* Figure out how much is left in TARGET that we have to clear.
4581		 Do all calculations in ptr_mode.  */
4582	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4583		{
4584		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4585		  target = adjust_address (target, BLKmode,
4586					   INTVAL (copy_size_rtx));
4587		}
4588	      else
4589		{
4590		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4591				       copy_size_rtx, NULL_RTX, 0,
4592				       OPTAB_LIB_WIDEN);
4593
4594#ifdef POINTERS_EXTEND_UNSIGNED
4595		  if (GET_MODE (copy_size_rtx) != Pmode)
4596		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4597						     TREE_UNSIGNED (sizetype));
4598#endif
4599
4600		  target = offset_address (target, copy_size_rtx,
4601					   highest_pow2_factor (copy_size));
4602		  label = gen_label_rtx ();
4603		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4604					   GET_MODE (size), 0, label);
4605		}
4606
4607	      if (size != const0_rtx)
4608		clear_storage (target, size);
4609
4610	      if (label)
4611		emit_label (label);
4612	    }
4613	}
4614      /* Handle calls that return values in multiple non-contiguous locations.
4615	 The Irix 6 ABI has examples of this.  */
4616      else if (GET_CODE (target) == PARALLEL)
4617	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4618      else if (GET_MODE (temp) == BLKmode)
4619	emit_block_move (target, temp, expr_size (exp),
4620			 (want_value & 2
4621			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4622      else
4623	emit_move_insn (target, temp);
4624    }
4625
4626  /* If we don't want a value, return NULL_RTX.  */
4627  if ((want_value & 1) == 0)
4628    return NULL_RTX;
4629
4630  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4631     ??? The latter test doesn't seem to make sense.  */
4632  else if (dont_return_target && GET_CODE (temp) != MEM)
4633    return temp;
4634
4635  /* Return TARGET itself if it is a hard register.  */
4636  else if ((want_value & 1) != 0
4637	   && GET_MODE (target) != BLKmode
4638	   && ! (GET_CODE (target) == REG
4639		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4640    return copy_to_reg (target);
4641
4642  else
4643    return target;
4644}
4645
4646/* Return 1 if EXP just contains zeros.  */
4647
4648static int
4649is_zeros_p (exp)
4650     tree exp;
4651{
4652  tree elt;
4653
4654  switch (TREE_CODE (exp))
4655    {
4656    case CONVERT_EXPR:
4657    case NOP_EXPR:
4658    case NON_LVALUE_EXPR:
4659    case VIEW_CONVERT_EXPR:
4660      return is_zeros_p (TREE_OPERAND (exp, 0));
4661
4662    case INTEGER_CST:
4663      return integer_zerop (exp);
4664
4665    case COMPLEX_CST:
4666      return
4667	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4668
4669    case REAL_CST:
4670      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4671
4672    case VECTOR_CST:
4673      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4674	   elt = TREE_CHAIN (elt))
4675	if (!is_zeros_p (TREE_VALUE (elt)))
4676	  return 0;
4677
4678      return 1;
4679
4680    case CONSTRUCTOR:
4681      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4682	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4683      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4684	if (! is_zeros_p (TREE_VALUE (elt)))
4685	  return 0;
4686
4687      return 1;
4688
4689    default:
4690      return 0;
4691    }
4692}
4693
4694/* Return 1 if EXP contains mostly (3/4)  zeros.  */
4695
4696static int
4697mostly_zeros_p (exp)
4698     tree exp;
4699{
4700  if (TREE_CODE (exp) == CONSTRUCTOR)
4701    {
4702      int elts = 0, zeros = 0;
4703      tree elt = CONSTRUCTOR_ELTS (exp);
4704      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4705	{
4706	  /* If there are no ranges of true bits, it is all zero.  */
4707	  return elt == NULL_TREE;
4708	}
4709      for (; elt; elt = TREE_CHAIN (elt))
4710	{
4711	  /* We do not handle the case where the index is a RANGE_EXPR,
4712	     so the statistic will be somewhat inaccurate.
4713	     We do make a more accurate count in store_constructor itself,
4714	     so since this function is only used for nested array elements,
4715	     this should be close enough.  */
4716	  if (mostly_zeros_p (TREE_VALUE (elt)))
4717	    zeros++;
4718	  elts++;
4719	}
4720
4721      return 4 * zeros >= 3 * elts;
4722    }
4723
4724  return is_zeros_p (exp);
4725}
4726
4727/* Helper function for store_constructor.
4728   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4729   TYPE is the type of the CONSTRUCTOR, not the element type.
4730   CLEARED is as for store_constructor.
4731   ALIAS_SET is the alias set to use for any stores.
4732
4733   This provides a recursive shortcut back to store_constructor when it isn't
4734   necessary to go through store_field.  This is so that we can pass through
4735   the cleared field to let store_constructor know that we may not have to
4736   clear a substructure if the outer structure has already been cleared.  */
4737
4738static void
4739store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4740			 alias_set)
4741     rtx target;
4742     unsigned HOST_WIDE_INT bitsize;
4743     HOST_WIDE_INT bitpos;
4744     enum machine_mode mode;
4745     tree exp, type;
4746     int cleared;
4747     int alias_set;
4748{
4749  if (TREE_CODE (exp) == CONSTRUCTOR
4750      && bitpos % BITS_PER_UNIT == 0
4751      /* If we have a nonzero bitpos for a register target, then we just
4752	 let store_field do the bitfield handling.  This is unlikely to
4753	 generate unnecessary clear instructions anyways.  */
4754      && (bitpos == 0 || GET_CODE (target) == MEM))
4755    {
4756      if (GET_CODE (target) == MEM)
4757	target
4758	  = adjust_address (target,
4759			    GET_MODE (target) == BLKmode
4760			    || 0 != (bitpos
4761				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4762			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4763
4764
4765      /* Update the alias set, if required.  */
4766      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4767	  && MEM_ALIAS_SET (target) != 0)
4768	{
4769	  target = copy_rtx (target);
4770	  set_mem_alias_set (target, alias_set);
4771	}
4772
4773      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4774    }
4775  else
4776    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4777		 alias_set);
4778}
4779
4780/* Store the value of constructor EXP into the rtx TARGET.
4781   TARGET is either a REG or a MEM; we know it cannot conflict, since
4782   safe_from_p has been called.
4783   CLEARED is true if TARGET is known to have been zero'd.
4784   SIZE is the number of bytes of TARGET we are allowed to modify: this
4785   may not be the same as the size of EXP if we are assigning to a field
4786   which has been packed to exclude padding bits.  */
4787
4788static void
4789store_constructor (exp, target, cleared, size)
4790     tree exp;
4791     rtx target;
4792     int cleared;
4793     HOST_WIDE_INT size;
4794{
4795  tree type = TREE_TYPE (exp);
4796#ifdef WORD_REGISTER_OPERATIONS
4797  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4798#endif
4799
4800  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4801      || TREE_CODE (type) == QUAL_UNION_TYPE)
4802    {
4803      tree elt;
4804
4805      /* We either clear the aggregate or indicate the value is dead.  */
4806      if ((TREE_CODE (type) == UNION_TYPE
4807	   || TREE_CODE (type) == QUAL_UNION_TYPE)
4808	  && ! cleared
4809	  && ! CONSTRUCTOR_ELTS (exp))
4810	/* If the constructor is empty, clear the union.  */
4811	{
4812	  clear_storage (target, expr_size (exp));
4813	  cleared = 1;
4814	}
4815
4816      /* If we are building a static constructor into a register,
4817	 set the initial value as zero so we can fold the value into
4818	 a constant.  But if more than one register is involved,
4819	 this probably loses.  */
4820      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4821	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4822	{
4823	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4824	  cleared = 1;
4825	}
4826
4827      /* If the constructor has fewer fields than the structure
4828	 or if we are initializing the structure to mostly zeros,
4829	 clear the whole structure first.  Don't do this if TARGET is a
4830	 register whose mode size isn't equal to SIZE since clear_storage
4831	 can't handle this case.  */
4832      else if (! cleared && size > 0
4833	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4834		    != fields_length (type))
4835		   || mostly_zeros_p (exp))
4836	       && (GET_CODE (target) != REG
4837		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4838		       == size)))
4839	{
4840	  clear_storage (target, GEN_INT (size));
4841	  cleared = 1;
4842	}
4843
4844      if (! cleared)
4845	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4846
4847      /* Store each element of the constructor into
4848	 the corresponding field of TARGET.  */
4849
4850      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4851	{
4852	  tree field = TREE_PURPOSE (elt);
4853	  tree value = TREE_VALUE (elt);
4854	  enum machine_mode mode;
4855	  HOST_WIDE_INT bitsize;
4856	  HOST_WIDE_INT bitpos = 0;
4857	  int unsignedp;
4858	  tree offset;
4859	  rtx to_rtx = target;
4860
4861	  /* Just ignore missing fields.
4862	     We cleared the whole structure, above,
4863	     if any fields are missing.  */
4864	  if (field == 0)
4865	    continue;
4866
4867	  if (cleared && is_zeros_p (value))
4868	    continue;
4869
4870	  if (host_integerp (DECL_SIZE (field), 1))
4871	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4872	  else
4873	    bitsize = -1;
4874
4875	  unsignedp = TREE_UNSIGNED (field);
4876	  mode = DECL_MODE (field);
4877	  if (DECL_BIT_FIELD (field))
4878	    mode = VOIDmode;
4879
4880	  offset = DECL_FIELD_OFFSET (field);
4881	  if (host_integerp (offset, 0)
4882	      && host_integerp (bit_position (field), 0))
4883	    {
4884	      bitpos = int_bit_position (field);
4885	      offset = 0;
4886	    }
4887	  else
4888	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4889
4890	  if (offset)
4891	    {
4892	      rtx offset_rtx;
4893
4894	      if (contains_placeholder_p (offset))
4895		offset = build (WITH_RECORD_EXPR, sizetype,
4896				offset, make_tree (TREE_TYPE (exp), target));
4897
4898	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4899	      if (GET_CODE (to_rtx) != MEM)
4900		abort ();
4901
4902#ifdef POINTERS_EXTEND_UNSIGNED
4903	      if (GET_MODE (offset_rtx) != Pmode)
4904		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4905#else
4906	      if (GET_MODE (offset_rtx) != ptr_mode)
4907		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4908#endif
4909
4910	      to_rtx = offset_address (to_rtx, offset_rtx,
4911				       highest_pow2_factor (offset));
4912	    }
4913
4914	  if (TREE_READONLY (field))
4915	    {
4916	      if (GET_CODE (to_rtx) == MEM)
4917		to_rtx = copy_rtx (to_rtx);
4918
4919	      RTX_UNCHANGING_P (to_rtx) = 1;
4920	    }
4921
4922#ifdef WORD_REGISTER_OPERATIONS
4923	  /* If this initializes a field that is smaller than a word, at the
4924	     start of a word, try to widen it to a full word.
4925	     This special case allows us to output C++ member function
4926	     initializations in a form that the optimizers can understand.  */
4927	  if (GET_CODE (target) == REG
4928	      && bitsize < BITS_PER_WORD
4929	      && bitpos % BITS_PER_WORD == 0
4930	      && GET_MODE_CLASS (mode) == MODE_INT
4931	      && TREE_CODE (value) == INTEGER_CST
4932	      && exp_size >= 0
4933	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4934	    {
4935	      tree type = TREE_TYPE (value);
4936
4937	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4938		{
4939		  type = (*lang_hooks.types.type_for_size)
4940		    (BITS_PER_WORD, TREE_UNSIGNED (type));
4941		  value = convert (type, value);
4942		}
4943
4944	      if (BYTES_BIG_ENDIAN)
4945		value
4946		  = fold (build (LSHIFT_EXPR, type, value,
4947				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4948	      bitsize = BITS_PER_WORD;
4949	      mode = word_mode;
4950	    }
4951#endif
4952
4953	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4954	      && DECL_NONADDRESSABLE_P (field))
4955	    {
4956	      to_rtx = copy_rtx (to_rtx);
4957	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4958	    }
4959
4960	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4961				   value, type, cleared,
4962				   get_alias_set (TREE_TYPE (field)));
4963	}
4964    }
4965  else if (TREE_CODE (type) == ARRAY_TYPE
4966	   || TREE_CODE (type) == VECTOR_TYPE)
4967    {
4968      tree elt;
4969      int i;
4970      int need_to_clear;
4971      tree domain = TYPE_DOMAIN (type);
4972      tree elttype = TREE_TYPE (type);
4973      int const_bounds_p;
4974      HOST_WIDE_INT minelt = 0;
4975      HOST_WIDE_INT maxelt = 0;
4976
4977      /* Vectors are like arrays, but the domain is stored via an array
4978	 type indirectly.  */
4979      if (TREE_CODE (type) == VECTOR_TYPE)
4980	{
4981	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4982	     the same field as TYPE_DOMAIN, we are not guaranteed that
4983	     it always will.  */
4984	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4985	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4986	}
4987
4988      const_bounds_p = (TYPE_MIN_VALUE (domain)
4989			&& TYPE_MAX_VALUE (domain)
4990			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
4991			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
4992
4993      /* If we have constant bounds for the range of the type, get them.  */
4994      if (const_bounds_p)
4995	{
4996	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4997	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4998	}
4999
5000      /* If the constructor has fewer elements than the array,
5001         clear the whole array first.  Similarly if this is
5002         static constructor of a non-BLKmode object.  */
5003      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5004	need_to_clear = 1;
5005      else
5006	{
5007	  HOST_WIDE_INT count = 0, zero_count = 0;
5008	  need_to_clear = ! const_bounds_p;
5009
5010	  /* This loop is a more accurate version of the loop in
5011	     mostly_zeros_p (it handles RANGE_EXPR in an index).
5012	     It is also needed to check for missing elements.  */
5013	  for (elt = CONSTRUCTOR_ELTS (exp);
5014	       elt != NULL_TREE && ! need_to_clear;
5015	       elt = TREE_CHAIN (elt))
5016	    {
5017	      tree index = TREE_PURPOSE (elt);
5018	      HOST_WIDE_INT this_node_count;
5019
5020	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5021		{
5022		  tree lo_index = TREE_OPERAND (index, 0);
5023		  tree hi_index = TREE_OPERAND (index, 1);
5024
5025		  if (! host_integerp (lo_index, 1)
5026		      || ! host_integerp (hi_index, 1))
5027		    {
5028		      need_to_clear = 1;
5029		      break;
5030		    }
5031
5032		  this_node_count = (tree_low_cst (hi_index, 1)
5033				     - tree_low_cst (lo_index, 1) + 1);
5034		}
5035	      else
5036		this_node_count = 1;
5037
5038	      count += this_node_count;
5039	      if (mostly_zeros_p (TREE_VALUE (elt)))
5040		zero_count += this_node_count;
5041	    }
5042
5043	  /* Clear the entire array first if there are any missing elements,
5044	     or if the incidence of zero elements is >= 75%.  */
5045	  if (! need_to_clear
5046	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5047	    need_to_clear = 1;
5048	}
5049
5050      if (need_to_clear && size > 0)
5051	{
5052	  if (! cleared)
5053	    {
5054	      if (REG_P (target))
5055		emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5056	      else
5057		clear_storage (target, GEN_INT (size));
5058	    }
5059	  cleared = 1;
5060	}
5061      else if (REG_P (target))
5062	/* Inform later passes that the old value is dead.  */
5063	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5064
5065      /* Store each element of the constructor into
5066	 the corresponding element of TARGET, determined
5067	 by counting the elements.  */
5068      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5069	   elt;
5070	   elt = TREE_CHAIN (elt), i++)
5071	{
5072	  enum machine_mode mode;
5073	  HOST_WIDE_INT bitsize;
5074	  HOST_WIDE_INT bitpos;
5075	  int unsignedp;
5076	  tree value = TREE_VALUE (elt);
5077	  tree index = TREE_PURPOSE (elt);
5078	  rtx xtarget = target;
5079
5080	  if (cleared && is_zeros_p (value))
5081	    continue;
5082
5083	  unsignedp = TREE_UNSIGNED (elttype);
5084	  mode = TYPE_MODE (elttype);
5085	  if (mode == BLKmode)
5086	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5087		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
5088		       : -1);
5089	  else
5090	    bitsize = GET_MODE_BITSIZE (mode);
5091
5092	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5093	    {
5094	      tree lo_index = TREE_OPERAND (index, 0);
5095	      tree hi_index = TREE_OPERAND (index, 1);
5096	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5097	      struct nesting *loop;
5098	      HOST_WIDE_INT lo, hi, count;
5099	      tree position;
5100
5101	      /* If the range is constant and "small", unroll the loop.  */
5102	      if (const_bounds_p
5103		  && host_integerp (lo_index, 0)
5104		  && host_integerp (hi_index, 0)
5105		  && (lo = tree_low_cst (lo_index, 0),
5106		      hi = tree_low_cst (hi_index, 0),
5107		      count = hi - lo + 1,
5108		      (GET_CODE (target) != MEM
5109		       || count <= 2
5110		       || (host_integerp (TYPE_SIZE (elttype), 1)
5111			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5112			       <= 40 * 8)))))
5113		{
5114		  lo -= minelt;  hi -= minelt;
5115		  for (; lo <= hi; lo++)
5116		    {
5117		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5118
5119		      if (GET_CODE (target) == MEM
5120			  && !MEM_KEEP_ALIAS_SET_P (target)
5121			  && TREE_CODE (type) == ARRAY_TYPE
5122			  && TYPE_NONALIASED_COMPONENT (type))
5123			{
5124			  target = copy_rtx (target);
5125			  MEM_KEEP_ALIAS_SET_P (target) = 1;
5126			}
5127
5128		      store_constructor_field
5129			(target, bitsize, bitpos, mode, value, type, cleared,
5130			 get_alias_set (elttype));
5131		    }
5132		}
5133	      else
5134		{
5135		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5136		  loop_top = gen_label_rtx ();
5137		  loop_end = gen_label_rtx ();
5138
5139		  unsignedp = TREE_UNSIGNED (domain);
5140
5141		  index = build_decl (VAR_DECL, NULL_TREE, domain);
5142
5143		  index_r
5144		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5145						 &unsignedp, 0));
5146		  SET_DECL_RTL (index, index_r);
5147		  if (TREE_CODE (value) == SAVE_EXPR
5148		      && SAVE_EXPR_RTL (value) == 0)
5149		    {
5150		      /* Make sure value gets expanded once before the
5151                         loop.  */
5152		      expand_expr (value, const0_rtx, VOIDmode, 0);
5153		      emit_queue ();
5154		    }
5155		  store_expr (lo_index, index_r, 0);
5156		  loop = expand_start_loop (0);
5157
5158		  /* Assign value to element index.  */
5159		  position
5160		    = convert (ssizetype,
5161			       fold (build (MINUS_EXPR, TREE_TYPE (index),
5162					    index, TYPE_MIN_VALUE (domain))));
5163		  position = size_binop (MULT_EXPR, position,
5164					 convert (ssizetype,
5165						  TYPE_SIZE_UNIT (elttype)));
5166
5167		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5168		  xtarget = offset_address (target, pos_rtx,
5169					    highest_pow2_factor (position));
5170		  xtarget = adjust_address (xtarget, mode, 0);
5171		  if (TREE_CODE (value) == CONSTRUCTOR)
5172		    store_constructor (value, xtarget, cleared,
5173				       bitsize / BITS_PER_UNIT);
5174		  else
5175		    store_expr (value, xtarget, 0);
5176
5177		  expand_exit_loop_if_false (loop,
5178					     build (LT_EXPR, integer_type_node,
5179						    index, hi_index));
5180
5181		  expand_increment (build (PREINCREMENT_EXPR,
5182					   TREE_TYPE (index),
5183					   index, integer_one_node), 0, 0);
5184		  expand_end_loop ();
5185		  emit_label (loop_end);
5186		}
5187	    }
5188	  else if ((index != 0 && ! host_integerp (index, 0))
5189		   || ! host_integerp (TYPE_SIZE (elttype), 1))
5190	    {
5191	      tree position;
5192
5193	      if (index == 0)
5194		index = ssize_int (1);
5195
5196	      if (minelt)
5197		index = convert (ssizetype,
5198				 fold (build (MINUS_EXPR, index,
5199					      TYPE_MIN_VALUE (domain))));
5200
5201	      position = size_binop (MULT_EXPR, index,
5202				     convert (ssizetype,
5203					      TYPE_SIZE_UNIT (elttype)));
5204	      xtarget = offset_address (target,
5205					expand_expr (position, 0, VOIDmode, 0),
5206					highest_pow2_factor (position));
5207	      xtarget = adjust_address (xtarget, mode, 0);
5208	      store_expr (value, xtarget, 0);
5209	    }
5210	  else
5211	    {
5212	      if (index != 0)
5213		bitpos = ((tree_low_cst (index, 0) - minelt)
5214			  * tree_low_cst (TYPE_SIZE (elttype), 1));
5215	      else
5216		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5217
5218	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5219		  && TREE_CODE (type) == ARRAY_TYPE
5220		  && TYPE_NONALIASED_COMPONENT (type))
5221		{
5222		  target = copy_rtx (target);
5223		  MEM_KEEP_ALIAS_SET_P (target) = 1;
5224		}
5225
5226	      store_constructor_field (target, bitsize, bitpos, mode, value,
5227				       type, cleared, get_alias_set (elttype));
5228
5229	    }
5230	}
5231    }
5232
5233  /* Set constructor assignments.  */
5234  else if (TREE_CODE (type) == SET_TYPE)
5235    {
5236      tree elt = CONSTRUCTOR_ELTS (exp);
5237      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5238      tree domain = TYPE_DOMAIN (type);
5239      tree domain_min, domain_max, bitlength;
5240
5241      /* The default implementation strategy is to extract the constant
5242	 parts of the constructor, use that to initialize the target,
5243	 and then "or" in whatever non-constant ranges we need in addition.
5244
5245	 If a large set is all zero or all ones, it is
5246	 probably better to set it using memset (if available) or bzero.
5247	 Also, if a large set has just a single range, it may also be
5248	 better to first clear all the first clear the set (using
5249	 bzero/memset), and set the bits we want.  */
5250
5251      /* Check for all zeros.  */
5252      if (elt == NULL_TREE && size > 0)
5253	{
5254	  if (!cleared)
5255	    clear_storage (target, GEN_INT (size));
5256	  return;
5257	}
5258
5259      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5260      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5261      bitlength = size_binop (PLUS_EXPR,
5262			      size_diffop (domain_max, domain_min),
5263			      ssize_int (1));
5264
5265      nbits = tree_low_cst (bitlength, 1);
5266
5267      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5268	 are "complicated" (more than one range), initialize (the
5269	 constant parts) by copying from a constant.  */
5270      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5271	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5272	{
5273	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5274	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5275	  char *bit_buffer = (char *) alloca (nbits);
5276	  HOST_WIDE_INT word = 0;
5277	  unsigned int bit_pos = 0;
5278	  unsigned int ibit = 0;
5279	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
5280
5281	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5282	  for (;;)
5283	    {
5284	      if (bit_buffer[ibit])
5285		{
5286		  if (BYTES_BIG_ENDIAN)
5287		    word |= (1 << (set_word_size - 1 - bit_pos));
5288		  else
5289		    word |= 1 << bit_pos;
5290		}
5291
5292	      bit_pos++;  ibit++;
5293	      if (bit_pos >= set_word_size || ibit == nbits)
5294		{
5295		  if (word != 0 || ! cleared)
5296		    {
5297		      rtx datum = GEN_INT (word);
5298		      rtx to_rtx;
5299
5300		      /* The assumption here is that it is safe to use
5301			 XEXP if the set is multi-word, but not if
5302			 it's single-word.  */
5303		      if (GET_CODE (target) == MEM)
5304			to_rtx = adjust_address (target, mode, offset);
5305		      else if (offset == 0)
5306			to_rtx = target;
5307		      else
5308			abort ();
5309		      emit_move_insn (to_rtx, datum);
5310		    }
5311
5312		  if (ibit == nbits)
5313		    break;
5314		  word = 0;
5315		  bit_pos = 0;
5316		  offset += set_word_size / BITS_PER_UNIT;
5317		}
5318	    }
5319	}
5320      else if (!cleared)
5321	/* Don't bother clearing storage if the set is all ones.  */
5322	if (TREE_CHAIN (elt) != NULL_TREE
5323	    || (TREE_PURPOSE (elt) == NULL_TREE
5324		? nbits != 1
5325		: ( ! host_integerp (TREE_VALUE (elt), 0)
5326		   || ! host_integerp (TREE_PURPOSE (elt), 0)
5327		   || (tree_low_cst (TREE_VALUE (elt), 0)
5328		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5329		       != (HOST_WIDE_INT) nbits))))
5330	  clear_storage (target, expr_size (exp));
5331
5332      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5333	{
5334	  /* Start of range of element or NULL.  */
5335	  tree startbit = TREE_PURPOSE (elt);
5336	  /* End of range of element, or element value.  */
5337	  tree endbit   = TREE_VALUE (elt);
5338	  HOST_WIDE_INT startb, endb;
5339	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5340
5341	  bitlength_rtx = expand_expr (bitlength,
5342				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5343
5344	  /* Handle non-range tuple element like [ expr ].  */
5345	  if (startbit == NULL_TREE)
5346	    {
5347	      startbit = save_expr (endbit);
5348	      endbit = startbit;
5349	    }
5350
5351	  startbit = convert (sizetype, startbit);
5352	  endbit = convert (sizetype, endbit);
5353	  if (! integer_zerop (domain_min))
5354	    {
5355	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5356	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5357	    }
5358	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5359				      EXPAND_CONST_ADDRESS);
5360	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5361				    EXPAND_CONST_ADDRESS);
5362
5363	  if (REG_P (target))
5364	    {
5365	      targetx
5366		= assign_temp
5367		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5368					  (GET_MODE (target), 0),
5369					  TYPE_QUAL_CONST)),
5370		   0, 1, 1);
5371	      emit_move_insn (targetx, target);
5372	    }
5373
5374	  else if (GET_CODE (target) == MEM)
5375	    targetx = target;
5376	  else
5377	    abort ();
5378
5379	  /* Optimization:  If startbit and endbit are constants divisible
5380	     by BITS_PER_UNIT, call memset instead.  */
5381	  if (TARGET_MEM_FUNCTIONS
5382	      && TREE_CODE (startbit) == INTEGER_CST
5383	      && TREE_CODE (endbit) == INTEGER_CST
5384	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5385	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5386	    {
5387	      emit_library_call (memset_libfunc, LCT_NORMAL,
5388				 VOIDmode, 3,
5389				 plus_constant (XEXP (targetx, 0),
5390						startb / BITS_PER_UNIT),
5391				 Pmode,
5392				 constm1_rtx, TYPE_MODE (integer_type_node),
5393				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5394				 TYPE_MODE (sizetype));
5395	    }
5396	  else
5397	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5398			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5399			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5400			       startbit_rtx, TYPE_MODE (sizetype),
5401			       endbit_rtx, TYPE_MODE (sizetype));
5402
5403	  if (REG_P (target))
5404	    emit_move_insn (target, targetx);
5405	}
5406    }
5407
5408  else
5409    abort ();
5410}
5411
5412/* Store the value of EXP (an expression tree)
5413   into a subfield of TARGET which has mode MODE and occupies
5414   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5415   If MODE is VOIDmode, it means that we are storing into a bit-field.
5416
5417   If VALUE_MODE is VOIDmode, return nothing in particular.
5418   UNSIGNEDP is not used in this case.
5419
5420   Otherwise, return an rtx for the value stored.  This rtx
5421   has mode VALUE_MODE if that is convenient to do.
5422   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5423
5424   TYPE is the type of the underlying object,
5425
5426   ALIAS_SET is the alias set for the destination.  This value will
5427   (in general) be different from that for TARGET, since TARGET is a
5428   reference to the containing structure.  */
5429
5430static rtx
5431store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5432	     alias_set)
5433     rtx target;
5434     HOST_WIDE_INT bitsize;
5435     HOST_WIDE_INT bitpos;
5436     enum machine_mode mode;
5437     tree exp;
5438     enum machine_mode value_mode;
5439     int unsignedp;
5440     tree type;
5441     int alias_set;
5442{
5443  HOST_WIDE_INT width_mask = 0;
5444
5445  if (TREE_CODE (exp) == ERROR_MARK)
5446    return const0_rtx;
5447
5448  /* If we have nothing to store, do nothing unless the expression has
5449     side-effects.  */
5450  if (bitsize == 0)
5451    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5452  else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5453    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5454
5455  /* If we are storing into an unaligned field of an aligned union that is
5456     in a register, we may have the mode of TARGET being an integer mode but
5457     MODE == BLKmode.  In that case, get an aligned object whose size and
5458     alignment are the same as TARGET and store TARGET into it (we can avoid
5459     the store if the field being stored is the entire width of TARGET).  Then
5460     call ourselves recursively to store the field into a BLKmode version of
5461     that object.  Finally, load from the object into TARGET.  This is not
5462     very efficient in general, but should only be slightly more expensive
5463     than the otherwise-required unaligned accesses.  Perhaps this can be
5464     cleaned up later.  */
5465
5466  if (mode == BLKmode
5467      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5468    {
5469      rtx object
5470	= assign_temp
5471	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5472	   0, 1, 1);
5473      rtx blk_object = adjust_address (object, BLKmode, 0);
5474
5475      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5476	emit_move_insn (object, target);
5477
5478      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5479		   alias_set);
5480
5481      emit_move_insn (target, object);
5482
5483      /* We want to return the BLKmode version of the data.  */
5484      return blk_object;
5485    }
5486
5487  if (GET_CODE (target) == CONCAT)
5488    {
5489      /* We're storing into a struct containing a single __complex.  */
5490
5491      if (bitpos != 0)
5492	abort ();
5493      return store_expr (exp, target, 0);
5494    }
5495
5496  /* If the structure is in a register or if the component
5497     is a bit field, we cannot use addressing to access it.
5498     Use bit-field techniques or SUBREG to store in it.  */
5499
5500  if (mode == VOIDmode
5501      || (mode != BLKmode && ! direct_store[(int) mode]
5502	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5503	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5504      || GET_CODE (target) == REG
5505      || GET_CODE (target) == SUBREG
5506      /* If the field isn't aligned enough to store as an ordinary memref,
5507	 store it as a bit field.  */
5508      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5509	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5510	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5511      /* If the RHS and field are a constant size and the size of the
5512	 RHS isn't the same size as the bitfield, we must use bitfield
5513	 operations.  */
5514      || (bitsize >= 0
5515	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5516	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5517    {
5518      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5519
5520      /* If BITSIZE is narrower than the size of the type of EXP
5521	 we will be narrowing TEMP.  Normally, what's wanted are the
5522	 low-order bits.  However, if EXP's type is a record and this is
5523	 big-endian machine, we want the upper BITSIZE bits.  */
5524      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5525	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5526	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5527	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5528			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5529				       - bitsize),
5530			     temp, 1);
5531
5532      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5533	 MODE.  */
5534      if (mode != VOIDmode && mode != BLKmode
5535	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5536	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5537
5538      /* If the modes of TARGET and TEMP are both BLKmode, both
5539	 must be in memory and BITPOS must be aligned on a byte
5540	 boundary.  If so, we simply do a block copy.  */
5541      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5542	{
5543	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5544	      || bitpos % BITS_PER_UNIT != 0)
5545	    abort ();
5546
5547	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5548	  emit_block_move (target, temp,
5549			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5550				    / BITS_PER_UNIT),
5551			   BLOCK_OP_NORMAL);
5552
5553	  return value_mode == VOIDmode ? const0_rtx : target;
5554	}
5555
5556      /* Store the value in the bitfield.  */
5557      store_bit_field (target, bitsize, bitpos, mode, temp,
5558		       int_size_in_bytes (type));
5559
5560      if (value_mode != VOIDmode)
5561	{
5562	  /* The caller wants an rtx for the value.
5563	     If possible, avoid refetching from the bitfield itself.  */
5564	  if (width_mask != 0
5565	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5566	    {
5567	      tree count;
5568	      enum machine_mode tmode;
5569
5570	      tmode = GET_MODE (temp);
5571	      if (tmode == VOIDmode)
5572		tmode = value_mode;
5573
5574	      if (unsignedp)
5575		return expand_and (tmode, temp,
5576				   gen_int_mode (width_mask, tmode),
5577				   NULL_RTX);
5578
5579	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5580	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5581	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5582	    }
5583
5584	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5585				    NULL_RTX, value_mode, VOIDmode,
5586				    int_size_in_bytes (type));
5587	}
5588      return const0_rtx;
5589    }
5590  else
5591    {
5592      rtx addr = XEXP (target, 0);
5593      rtx to_rtx = target;
5594
5595      /* If a value is wanted, it must be the lhs;
5596	 so make the address stable for multiple use.  */
5597
5598      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5599	  && ! CONSTANT_ADDRESS_P (addr)
5600	  /* A frame-pointer reference is already stable.  */
5601	  && ! (GET_CODE (addr) == PLUS
5602		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5603		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5604		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5605	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5606
5607      /* Now build a reference to just the desired component.  */
5608
5609      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5610
5611      if (to_rtx == target)
5612	to_rtx = copy_rtx (to_rtx);
5613
5614      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5615      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5616	set_mem_alias_set (to_rtx, alias_set);
5617
5618      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5619    }
5620}
5621
5622/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5623   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5624   codes and find the ultimate containing object, which we return.
5625
5626   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5627   bit position, and *PUNSIGNEDP to the signedness of the field.
5628   If the position of the field is variable, we store a tree
5629   giving the variable offset (in units) in *POFFSET.
5630   This offset is in addition to the bit position.
5631   If the position is not variable, we store 0 in *POFFSET.
5632
5633   If any of the extraction expressions is volatile,
5634   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5635
5636   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5637   is a mode that can be used to access the field.  In that case, *PBITSIZE
5638   is redundant.
5639
5640   If the field describes a variable-sized object, *PMODE is set to
5641   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5642   this case, but the address of the object can be found.  */
5643
5644tree
5645get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5646		     punsignedp, pvolatilep)
5647     tree exp;
5648     HOST_WIDE_INT *pbitsize;
5649     HOST_WIDE_INT *pbitpos;
5650     tree *poffset;
5651     enum machine_mode *pmode;
5652     int *punsignedp;
5653     int *pvolatilep;
5654{
5655  tree size_tree = 0;
5656  enum machine_mode mode = VOIDmode;
5657  tree offset = size_zero_node;
5658  tree bit_offset = bitsize_zero_node;
5659  tree placeholder_ptr = 0;
5660  tree tem;
5661
5662  /* First get the mode, signedness, and size.  We do this from just the
5663     outermost expression.  */
5664  if (TREE_CODE (exp) == COMPONENT_REF)
5665    {
5666      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5667      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5668	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5669
5670      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5671    }
5672  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5673    {
5674      size_tree = TREE_OPERAND (exp, 1);
5675      *punsignedp = TREE_UNSIGNED (exp);
5676    }
5677  else
5678    {
5679      mode = TYPE_MODE (TREE_TYPE (exp));
5680      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5681
5682      if (mode == BLKmode)
5683	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5684      else
5685	*pbitsize = GET_MODE_BITSIZE (mode);
5686    }
5687
5688  if (size_tree != 0)
5689    {
5690      if (! host_integerp (size_tree, 1))
5691	mode = BLKmode, *pbitsize = -1;
5692      else
5693	*pbitsize = tree_low_cst (size_tree, 1);
5694    }
5695
5696  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5697     and find the ultimate containing object.  */
5698  while (1)
5699    {
5700      if (TREE_CODE (exp) == BIT_FIELD_REF)
5701	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5702      else if (TREE_CODE (exp) == COMPONENT_REF)
5703	{
5704	  tree field = TREE_OPERAND (exp, 1);
5705	  tree this_offset = DECL_FIELD_OFFSET (field);
5706
5707	  /* If this field hasn't been filled in yet, don't go
5708	     past it.  This should only happen when folding expressions
5709	     made during type construction.  */
5710	  if (this_offset == 0)
5711	    break;
5712	  else if (! TREE_CONSTANT (this_offset)
5713		   && contains_placeholder_p (this_offset))
5714	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5715
5716	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5717	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5718				   DECL_FIELD_BIT_OFFSET (field));
5719
5720	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5721	}
5722
5723      else if (TREE_CODE (exp) == ARRAY_REF
5724	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5725	{
5726	  tree index = TREE_OPERAND (exp, 1);
5727	  tree array = TREE_OPERAND (exp, 0);
5728	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5729	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5730	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5731
5732	  /* We assume all arrays have sizes that are a multiple of a byte.
5733	     First subtract the lower bound, if any, in the type of the
5734	     index, then convert to sizetype and multiply by the size of the
5735	     array element.  */
5736	  if (low_bound != 0 && ! integer_zerop (low_bound))
5737	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5738				 index, low_bound));
5739
5740	  /* If the index has a self-referential type, pass it to a
5741	     WITH_RECORD_EXPR; if the component size is, pass our
5742	     component to one.  */
5743	  if (! TREE_CONSTANT (index)
5744	      && contains_placeholder_p (index))
5745	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5746	  if (! TREE_CONSTANT (unit_size)
5747	      && contains_placeholder_p (unit_size))
5748	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5749
5750	  offset = size_binop (PLUS_EXPR, offset,
5751			       size_binop (MULT_EXPR,
5752					   convert (sizetype, index),
5753					   unit_size));
5754	}
5755
5756      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5757	{
5758	  tree new = find_placeholder (exp, &placeholder_ptr);
5759
5760	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5761	     We might have been called from tree optimization where we
5762	     haven't set up an object yet.  */
5763	  if (new == 0)
5764	    break;
5765	  else
5766	    exp = new;
5767
5768	  continue;
5769	}
5770      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5771	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5772	       && ! ((TREE_CODE (exp) == NOP_EXPR
5773		      || TREE_CODE (exp) == CONVERT_EXPR)
5774		     && (TYPE_MODE (TREE_TYPE (exp))
5775			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5776	break;
5777
5778      /* If any reference in the chain is volatile, the effect is volatile.  */
5779      if (TREE_THIS_VOLATILE (exp))
5780	*pvolatilep = 1;
5781
5782      exp = TREE_OPERAND (exp, 0);
5783    }
5784
5785  /* If OFFSET is constant, see if we can return the whole thing as a
5786     constant bit position.  Otherwise, split it up.  */
5787  if (host_integerp (offset, 0)
5788      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5789				 bitsize_unit_node))
5790      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5791      && host_integerp (tem, 0))
5792    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5793  else
5794    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5795
5796  *pmode = mode;
5797  return exp;
5798}
5799
5800/* Return 1 if T is an expression that get_inner_reference handles.  */
5801
5802int
5803handled_component_p (t)
5804     tree t;
5805{
5806  switch (TREE_CODE (t))
5807    {
5808    case BIT_FIELD_REF:
5809    case COMPONENT_REF:
5810    case ARRAY_REF:
5811    case ARRAY_RANGE_REF:
5812    case NON_LVALUE_EXPR:
5813    case VIEW_CONVERT_EXPR:
5814      return 1;
5815
5816    case NOP_EXPR:
5817    case CONVERT_EXPR:
5818      return (TYPE_MODE (TREE_TYPE (t))
5819	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5820
5821    default:
5822      return 0;
5823    }
5824}
5825
5826/* Given an rtx VALUE that may contain additions and multiplications, return
5827   an equivalent value that just refers to a register, memory, or constant.
5828   This is done by generating instructions to perform the arithmetic and
5829   returning a pseudo-register containing the value.
5830
5831   The returned value may be a REG, SUBREG, MEM or constant.  */
5832
5833rtx
5834force_operand (value, target)
5835     rtx value, target;
5836{
5837  rtx op1, op2;
5838  /* Use subtarget as the target for operand 0 of a binary operation.  */
5839  rtx subtarget = get_subtarget (target);
5840  enum rtx_code code = GET_CODE (value);
5841
5842  /* Check for a PIC address load.  */
5843  if ((code == PLUS || code == MINUS)
5844      && XEXP (value, 0) == pic_offset_table_rtx
5845      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5846	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5847	  || GET_CODE (XEXP (value, 1)) == CONST))
5848    {
5849      if (!subtarget)
5850	subtarget = gen_reg_rtx (GET_MODE (value));
5851      emit_move_insn (subtarget, value);
5852      return subtarget;
5853    }
5854
5855  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5856    {
5857      if (!target)
5858	target = gen_reg_rtx (GET_MODE (value));
5859      convert_move (target, force_operand (XEXP (value, 0), NULL),
5860		    code == ZERO_EXTEND);
5861      return target;
5862    }
5863
5864  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5865    {
5866      op2 = XEXP (value, 1);
5867      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5868	subtarget = 0;
5869      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5870	{
5871	  code = PLUS;
5872	  op2 = negate_rtx (GET_MODE (value), op2);
5873	}
5874
5875      /* Check for an addition with OP2 a constant integer and our first
5876         operand a PLUS of a virtual register and something else.  In that
5877         case, we want to emit the sum of the virtual register and the
5878         constant first and then add the other value.  This allows virtual
5879         register instantiation to simply modify the constant rather than
5880         creating another one around this addition.  */
5881      if (code == PLUS && GET_CODE (op2) == CONST_INT
5882	  && GET_CODE (XEXP (value, 0)) == PLUS
5883	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5884	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5885	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5886	{
5887	  rtx temp = expand_simple_binop (GET_MODE (value), code,
5888					  XEXP (XEXP (value, 0), 0), op2,
5889					  subtarget, 0, OPTAB_LIB_WIDEN);
5890	  return expand_simple_binop (GET_MODE (value), code, temp,
5891				      force_operand (XEXP (XEXP (value,
5892								 0), 1), 0),
5893				      target, 0, OPTAB_LIB_WIDEN);
5894	}
5895
5896      op1 = force_operand (XEXP (value, 0), subtarget);
5897      op2 = force_operand (op2, NULL_RTX);
5898      switch (code)
5899	{
5900	case MULT:
5901	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
5902	case DIV:
5903	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
5904	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
5905					target, 1, OPTAB_LIB_WIDEN);
5906	  else
5907	    return expand_divmod (0,
5908				  FLOAT_MODE_P (GET_MODE (value))
5909				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
5910				  GET_MODE (value), op1, op2, target, 0);
5911	  break;
5912	case MOD:
5913	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5914				target, 0);
5915	  break;
5916	case UDIV:
5917	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5918				target, 1);
5919	  break;
5920	case UMOD:
5921	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5922				target, 1);
5923	  break;
5924	case ASHIFTRT:
5925	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5926				      target, 0, OPTAB_LIB_WIDEN);
5927	  break;
5928	default:
5929	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5930				      target, 1, OPTAB_LIB_WIDEN);
5931	}
5932    }
5933  if (GET_RTX_CLASS (code) == '1')
5934    {
5935      op1 = force_operand (XEXP (value, 0), NULL_RTX);
5936      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5937    }
5938
5939#ifdef INSN_SCHEDULING
5940  /* On machines that have insn scheduling, we want all memory reference to be
5941     explicit, so we need to deal with such paradoxical SUBREGs.  */
5942  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5943      && (GET_MODE_SIZE (GET_MODE (value))
5944	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5945    value
5946      = simplify_gen_subreg (GET_MODE (value),
5947			     force_reg (GET_MODE (SUBREG_REG (value)),
5948					force_operand (SUBREG_REG (value),
5949						       NULL_RTX)),
5950			     GET_MODE (SUBREG_REG (value)),
5951			     SUBREG_BYTE (value));
5952#endif
5953
5954  return value;
5955}
5956
5957/* Subroutine of expand_expr: return nonzero iff there is no way that
5958   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5959   call is going to be used to determine whether we need a temporary
5960   for EXP, as opposed to a recursive call to this function.
5961
5962   It is always safe for this routine to return zero since it merely
5963   searches for optimization opportunities.  */
5964
5965int
5966safe_from_p (x, exp, top_p)
5967     rtx x;
5968     tree exp;
5969     int top_p;
5970{
5971  rtx exp_rtl = 0;
5972  int i, nops;
5973  static tree save_expr_list;
5974
5975  if (x == 0
5976      /* If EXP has varying size, we MUST use a target since we currently
5977	 have no way of allocating temporaries of variable size
5978	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5979	 So we assume here that something at a higher level has prevented a
5980	 clash.  This is somewhat bogus, but the best we can do.  Only
5981	 do this when X is BLKmode and when we are at the top level.  */
5982      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5983	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5984	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5985	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5986	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5987	      != INTEGER_CST)
5988	  && GET_MODE (x) == BLKmode)
5989      /* If X is in the outgoing argument area, it is always safe.  */
5990      || (GET_CODE (x) == MEM
5991	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5992	      || (GET_CODE (XEXP (x, 0)) == PLUS
5993		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5994    return 1;
5995
5996  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5997     find the underlying pseudo.  */
5998  if (GET_CODE (x) == SUBREG)
5999    {
6000      x = SUBREG_REG (x);
6001      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6002	return 0;
6003    }
6004
6005  /* A SAVE_EXPR might appear many times in the expression passed to the
6006     top-level safe_from_p call, and if it has a complex subexpression,
6007     examining it multiple times could result in a combinatorial explosion.
6008     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6009     with optimization took about 28 minutes to compile -- even though it was
6010     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
6011     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
6012     we have processed.  Note that the only test of top_p was above.  */
6013
6014  if (top_p)
6015    {
6016      int rtn;
6017      tree t;
6018
6019      save_expr_list = 0;
6020
6021      rtn = safe_from_p (x, exp, 0);
6022
6023      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6024	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6025
6026      return rtn;
6027    }
6028
6029  /* Now look at our tree code and possibly recurse.  */
6030  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6031    {
6032    case 'd':
6033      exp_rtl = DECL_RTL_IF_SET (exp);
6034      break;
6035
6036    case 'c':
6037      return 1;
6038
6039    case 'x':
6040      if (TREE_CODE (exp) == TREE_LIST)
6041	{
6042	  while (1)
6043	    {
6044	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6045		return 0;
6046	      exp = TREE_CHAIN (exp);
6047	      if (!exp)
6048		return 1;
6049	      if (TREE_CODE (exp) != TREE_LIST)
6050		return safe_from_p (x, exp, 0);
6051	    }
6052	}
6053      else if (TREE_CODE (exp) == ERROR_MARK)
6054	return 1;	/* An already-visited SAVE_EXPR? */
6055      else
6056	return 0;
6057
6058    case '2':
6059    case '<':
6060      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6061	return 0;
6062      /* FALLTHRU */
6063
6064    case '1':
6065      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6066
6067    case 'e':
6068    case 'r':
6069      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6070	 the expression.  If it is set, we conflict iff we are that rtx or
6071	 both are in memory.  Otherwise, we check all operands of the
6072	 expression recursively.  */
6073
6074      switch (TREE_CODE (exp))
6075	{
6076	case ADDR_EXPR:
6077	  /* If the operand is static or we are static, we can't conflict.
6078	     Likewise if we don't conflict with the operand at all.  */
6079	  if (staticp (TREE_OPERAND (exp, 0))
6080	      || TREE_STATIC (exp)
6081	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6082	    return 1;
6083
6084	  /* Otherwise, the only way this can conflict is if we are taking
6085	     the address of a DECL a that address if part of X, which is
6086	     very rare.  */
6087	  exp = TREE_OPERAND (exp, 0);
6088	  if (DECL_P (exp))
6089	    {
6090	      if (!DECL_RTL_SET_P (exp)
6091		  || GET_CODE (DECL_RTL (exp)) != MEM)
6092		return 0;
6093	      else
6094		exp_rtl = XEXP (DECL_RTL (exp), 0);
6095	    }
6096	  break;
6097
6098	case INDIRECT_REF:
6099	  if (GET_CODE (x) == MEM
6100	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6101					get_alias_set (exp)))
6102	    return 0;
6103	  break;
6104
6105	case CALL_EXPR:
6106	  /* Assume that the call will clobber all hard registers and
6107	     all of memory.  */
6108	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6109	      || GET_CODE (x) == MEM)
6110	    return 0;
6111	  break;
6112
6113	case RTL_EXPR:
6114	  /* If a sequence exists, we would have to scan every instruction
6115	     in the sequence to see if it was safe.  This is probably not
6116	     worthwhile.  */
6117	  if (RTL_EXPR_SEQUENCE (exp))
6118	    return 0;
6119
6120	  exp_rtl = RTL_EXPR_RTL (exp);
6121	  break;
6122
6123	case WITH_CLEANUP_EXPR:
6124	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6125	  break;
6126
6127	case CLEANUP_POINT_EXPR:
6128	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6129
6130	case SAVE_EXPR:
6131	  exp_rtl = SAVE_EXPR_RTL (exp);
6132	  if (exp_rtl)
6133	    break;
6134
6135	  /* If we've already scanned this, don't do it again.  Otherwise,
6136	     show we've scanned it and record for clearing the flag if we're
6137	     going on.  */
6138	  if (TREE_PRIVATE (exp))
6139	    return 1;
6140
6141	  TREE_PRIVATE (exp) = 1;
6142	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6143	    {
6144	      TREE_PRIVATE (exp) = 0;
6145	      return 0;
6146	    }
6147
6148	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6149	  return 1;
6150
6151	case BIND_EXPR:
6152	  /* The only operand we look at is operand 1.  The rest aren't
6153	     part of the expression.  */
6154	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6155
6156	case METHOD_CALL_EXPR:
6157	  /* This takes an rtx argument, but shouldn't appear here.  */
6158	  abort ();
6159
6160	default:
6161	  break;
6162	}
6163
6164      /* If we have an rtx, we do not need to scan our operands.  */
6165      if (exp_rtl)
6166	break;
6167
6168      nops = first_rtl_op (TREE_CODE (exp));
6169      for (i = 0; i < nops; i++)
6170	if (TREE_OPERAND (exp, i) != 0
6171	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6172	  return 0;
6173
6174      /* If this is a language-specific tree code, it may require
6175	 special handling.  */
6176      if ((unsigned int) TREE_CODE (exp)
6177	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6178	  && !(*lang_hooks.safe_from_p) (x, exp))
6179	return 0;
6180    }
6181
6182  /* If we have an rtl, find any enclosed object.  Then see if we conflict
6183     with it.  */
6184  if (exp_rtl)
6185    {
6186      if (GET_CODE (exp_rtl) == SUBREG)
6187	{
6188	  exp_rtl = SUBREG_REG (exp_rtl);
6189	  if (GET_CODE (exp_rtl) == REG
6190	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6191	    return 0;
6192	}
6193
6194      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6195	 are memory and they conflict.  */
6196      return ! (rtx_equal_p (x, exp_rtl)
6197		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6198		    && true_dependence (exp_rtl, VOIDmode, x,
6199					rtx_addr_varies_p)));
6200    }
6201
6202  /* If we reach here, it is safe.  */
6203  return 1;
6204}
6205
6206/* Subroutine of expand_expr: return rtx if EXP is a
6207   variable or parameter; else return 0.  */
6208
6209static rtx
6210var_rtx (exp)
6211     tree exp;
6212{
6213  STRIP_NOPS (exp);
6214  switch (TREE_CODE (exp))
6215    {
6216    case PARM_DECL:
6217    case VAR_DECL:
6218      return DECL_RTL (exp);
6219    default:
6220      return 0;
6221    }
6222}
6223
6224#ifdef MAX_INTEGER_COMPUTATION_MODE
6225
6226void
6227check_max_integer_computation_mode (exp)
6228     tree exp;
6229{
6230  enum tree_code code;
6231  enum machine_mode mode;
6232
6233  /* Strip any NOPs that don't change the mode.  */
6234  STRIP_NOPS (exp);
6235  code = TREE_CODE (exp);
6236
6237  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
6238  if (code == NOP_EXPR
6239      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6240    return;
6241
6242  /* First check the type of the overall operation.   We need only look at
6243     unary, binary and relational operations.  */
6244  if (TREE_CODE_CLASS (code) == '1'
6245      || TREE_CODE_CLASS (code) == '2'
6246      || TREE_CODE_CLASS (code) == '<')
6247    {
6248      mode = TYPE_MODE (TREE_TYPE (exp));
6249      if (GET_MODE_CLASS (mode) == MODE_INT
6250	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6251	internal_error ("unsupported wide integer operation");
6252    }
6253
6254  /* Check operand of a unary op.  */
6255  if (TREE_CODE_CLASS (code) == '1')
6256    {
6257      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6258      if (GET_MODE_CLASS (mode) == MODE_INT
6259	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6260	internal_error ("unsupported wide integer operation");
6261    }
6262
6263  /* Check operands of a binary/comparison op.  */
6264  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6265    {
6266      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6267      if (GET_MODE_CLASS (mode) == MODE_INT
6268	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6269	internal_error ("unsupported wide integer operation");
6270
6271      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6272      if (GET_MODE_CLASS (mode) == MODE_INT
6273	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6274	internal_error ("unsupported wide integer operation");
6275    }
6276}
6277#endif
6278
6279/* Return the highest power of two that EXP is known to be a multiple of.
6280   This is used in updating alignment of MEMs in array references.  */
6281
6282static HOST_WIDE_INT
6283highest_pow2_factor (exp)
6284     tree exp;
6285{
6286  HOST_WIDE_INT c0, c1;
6287
6288  switch (TREE_CODE (exp))
6289    {
6290    case INTEGER_CST:
6291      /* We can find the lowest bit that's a one.  If the low
6292	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6293	 We need to handle this case since we can find it in a COND_EXPR,
6294	 a MIN_EXPR, or a MAX_EXPR.  If the constant overlows, we have an
6295	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6296	 later ICE.  */
6297      if (TREE_CONSTANT_OVERFLOW (exp))
6298	return BIGGEST_ALIGNMENT;
6299      else
6300	{
6301	  /* Note: tree_low_cst is intentionally not used here,
6302	     we don't care about the upper bits.  */
6303	  c0 = TREE_INT_CST_LOW (exp);
6304	  c0 &= -c0;
6305	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6306	}
6307      break;
6308
6309    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6310      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6311      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6312      return MIN (c0, c1);
6313
6314    case MULT_EXPR:
6315      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6316      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6317      return c0 * c1;
6318
6319    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6320    case CEIL_DIV_EXPR:
6321      if (integer_pow2p (TREE_OPERAND (exp, 1))
6322	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6323	{
6324	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6325	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6326	  return MAX (1, c0 / c1);
6327	}
6328      break;
6329
6330    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6331    case SAVE_EXPR: case WITH_RECORD_EXPR:
6332      return highest_pow2_factor (TREE_OPERAND (exp, 0));
6333
6334    case COMPOUND_EXPR:
6335      return highest_pow2_factor (TREE_OPERAND (exp, 1));
6336
6337    case COND_EXPR:
6338      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6339      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6340      return MIN (c0, c1);
6341
6342    default:
6343      break;
6344    }
6345
6346  return 1;
6347}
6348
6349/* Similar, except that it is known that the expression must be a multiple
6350   of the alignment of TYPE.  */
6351
6352static HOST_WIDE_INT
6353highest_pow2_factor_for_type (type, exp)
6354     tree type;
6355     tree exp;
6356{
6357  HOST_WIDE_INT type_align, factor;
6358
6359  factor = highest_pow2_factor (exp);
6360  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6361  return MAX (factor, type_align);
6362}
6363
6364/* Return an object on the placeholder list that matches EXP, a
6365   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
6366   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
6367   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
6368   is a location which initially points to a starting location in the
6369   placeholder list (zero means start of the list) and where a pointer into
6370   the placeholder list at which the object is found is placed.  */
6371
6372tree
6373find_placeholder (exp, plist)
6374     tree exp;
6375     tree *plist;
6376{
6377  tree type = TREE_TYPE (exp);
6378  tree placeholder_expr;
6379
6380  for (placeholder_expr
6381       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6382       placeholder_expr != 0;
6383       placeholder_expr = TREE_CHAIN (placeholder_expr))
6384    {
6385      tree need_type = TYPE_MAIN_VARIANT (type);
6386      tree elt;
6387
6388      /* Find the outermost reference that is of the type we want.  If none,
6389	 see if any object has a type that is a pointer to the type we
6390	 want.  */
6391      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6392	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6393		   || TREE_CODE (elt) == COND_EXPR)
6394		  ? TREE_OPERAND (elt, 1)
6395		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6396		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6397		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6398		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6399		  ? TREE_OPERAND (elt, 0) : 0))
6400	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6401	  {
6402	    if (plist)
6403	      *plist = placeholder_expr;
6404	    return elt;
6405	  }
6406
6407      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6408	   elt
6409	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6410	       || TREE_CODE (elt) == COND_EXPR)
6411	      ? TREE_OPERAND (elt, 1)
6412	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6413		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6414		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6415		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6416	      ? TREE_OPERAND (elt, 0) : 0))
6417	if (POINTER_TYPE_P (TREE_TYPE (elt))
6418	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6419		== need_type))
6420	  {
6421	    if (plist)
6422	      *plist = placeholder_expr;
6423	    return build1 (INDIRECT_REF, need_type, elt);
6424	  }
6425    }
6426
6427  return 0;
6428}
6429
6430/* expand_expr: generate code for computing expression EXP.
6431   An rtx for the computed value is returned.  The value is never null.
6432   In the case of a void EXP, const0_rtx is returned.
6433
6434   The value may be stored in TARGET if TARGET is nonzero.
6435   TARGET is just a suggestion; callers must assume that
6436   the rtx returned may not be the same as TARGET.
6437
6438   If TARGET is CONST0_RTX, it means that the value will be ignored.
6439
6440   If TMODE is not VOIDmode, it suggests generating the
6441   result in mode TMODE.  But this is done only when convenient.
6442   Otherwise, TMODE is ignored and the value generated in its natural mode.
6443   TMODE is just a suggestion; callers must assume that
6444   the rtx returned may not have mode TMODE.
6445
6446   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6447   probably will not be used.
6448
6449   If MODIFIER is EXPAND_SUM then when EXP is an addition
6450   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6451   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6452   products as above, or REG or MEM, or constant.
6453   Ordinarily in such cases we would output mul or add instructions
6454   and then return a pseudo reg containing the sum.
6455
6456   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6457   it also marks a label as absolutely required (it can't be dead).
6458   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6459   This is used for outputting expressions used in initializers.
6460
6461   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6462   with a constant address even if that address is not normally legitimate.
6463   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6464
6465   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6466   a call parameter.  Such targets require special care as we haven't yet
6467   marked TARGET so that it's safe from being trashed by libcalls.  We
6468   don't want to use TARGET for anything but the final result;
6469   Intermediate values must go elsewhere.   Additionally, calls to
6470   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
6471
6472rtx
6473expand_expr (exp, target, tmode, modifier)
6474     tree exp;
6475     rtx target;
6476     enum machine_mode tmode;
6477     enum expand_modifier modifier;
6478{
6479  rtx op0, op1, temp;
6480  tree type = TREE_TYPE (exp);
6481  int unsignedp = TREE_UNSIGNED (type);
6482  enum machine_mode mode;
6483  enum tree_code code = TREE_CODE (exp);
6484  optab this_optab;
6485  rtx subtarget, original_target;
6486  int ignore;
6487  tree context;
6488
6489  /* Handle ERROR_MARK before anybody tries to access its type.  */
6490  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6491    {
6492      op0 = CONST0_RTX (tmode);
6493      if (op0 != 0)
6494	return op0;
6495      return const0_rtx;
6496    }
6497
6498  mode = TYPE_MODE (type);
6499  /* Use subtarget as the target for operand 0 of a binary operation.  */
6500  subtarget = get_subtarget (target);
6501  original_target = target;
6502  ignore = (target == const0_rtx
6503	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6504		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6505		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6506		&& TREE_CODE (type) == VOID_TYPE));
6507
6508  /* If we are going to ignore this result, we need only do something
6509     if there is a side-effect somewhere in the expression.  If there
6510     is, short-circuit the most common cases here.  Note that we must
6511     not call expand_expr with anything but const0_rtx in case this
6512     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6513
6514  if (ignore)
6515    {
6516      if (! TREE_SIDE_EFFECTS (exp))
6517	return const0_rtx;
6518
6519      /* Ensure we reference a volatile object even if value is ignored, but
6520	 don't do this if all we are doing is taking its address.  */
6521      if (TREE_THIS_VOLATILE (exp)
6522	  && TREE_CODE (exp) != FUNCTION_DECL
6523	  && mode != VOIDmode && mode != BLKmode
6524	  && modifier != EXPAND_CONST_ADDRESS)
6525	{
6526	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6527	  if (GET_CODE (temp) == MEM)
6528	    temp = copy_to_reg (temp);
6529	  return const0_rtx;
6530	}
6531
6532      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6533	  || code == INDIRECT_REF || code == BUFFER_REF)
6534	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6535			    modifier);
6536
6537      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6538	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6539	{
6540	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6541	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6542	  return const0_rtx;
6543	}
6544      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6545	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6546	/* If the second operand has no side effects, just evaluate
6547	   the first.  */
6548	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6549			    modifier);
6550      else if (code == BIT_FIELD_REF)
6551	{
6552	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6553	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6554	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6555	  return const0_rtx;
6556	}
6557
6558      target = 0;
6559    }
6560
6561#ifdef MAX_INTEGER_COMPUTATION_MODE
6562  /* Only check stuff here if the mode we want is different from the mode
6563     of the expression; if it's the same, check_max_integer_computiation_mode
6564     will handle it.  Do we really need to check this stuff at all?  */
6565
6566  if (target
6567      && GET_MODE (target) != mode
6568      && TREE_CODE (exp) != INTEGER_CST
6569      && TREE_CODE (exp) != PARM_DECL
6570      && TREE_CODE (exp) != ARRAY_REF
6571      && TREE_CODE (exp) != ARRAY_RANGE_REF
6572      && TREE_CODE (exp) != COMPONENT_REF
6573      && TREE_CODE (exp) != BIT_FIELD_REF
6574      && TREE_CODE (exp) != INDIRECT_REF
6575      && TREE_CODE (exp) != CALL_EXPR
6576      && TREE_CODE (exp) != VAR_DECL
6577      && TREE_CODE (exp) != RTL_EXPR)
6578    {
6579      enum machine_mode mode = GET_MODE (target);
6580
6581      if (GET_MODE_CLASS (mode) == MODE_INT
6582	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6583	internal_error ("unsupported wide integer operation");
6584    }
6585
6586  if (tmode != mode
6587      && TREE_CODE (exp) != INTEGER_CST
6588      && TREE_CODE (exp) != PARM_DECL
6589      && TREE_CODE (exp) != ARRAY_REF
6590      && TREE_CODE (exp) != ARRAY_RANGE_REF
6591      && TREE_CODE (exp) != COMPONENT_REF
6592      && TREE_CODE (exp) != BIT_FIELD_REF
6593      && TREE_CODE (exp) != INDIRECT_REF
6594      && TREE_CODE (exp) != VAR_DECL
6595      && TREE_CODE (exp) != CALL_EXPR
6596      && TREE_CODE (exp) != RTL_EXPR
6597      && GET_MODE_CLASS (tmode) == MODE_INT
6598      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6599    internal_error ("unsupported wide integer operation");
6600
6601  check_max_integer_computation_mode (exp);
6602#endif
6603
6604  /* If will do cse, generate all results into pseudo registers
6605     since 1) that allows cse to find more things
6606     and 2) otherwise cse could produce an insn the machine
6607     cannot support.  And exception is a CONSTRUCTOR into a multi-word
6608     MEM: that's much more likely to be most efficient into the MEM.  */
6609
6610  if (! cse_not_expected && mode != BLKmode && target
6611      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6612      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6613    target = 0;
6614
6615  switch (code)
6616    {
6617    case LABEL_DECL:
6618      {
6619	tree function = decl_function_context (exp);
6620	/* Handle using a label in a containing function.  */
6621	if (function != current_function_decl
6622	    && function != inline_function_decl && function != 0)
6623	  {
6624	    struct function *p = find_function_data (function);
6625	    p->expr->x_forced_labels
6626	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6627				   p->expr->x_forced_labels);
6628	  }
6629	else
6630	  {
6631	    if (modifier == EXPAND_INITIALIZER)
6632	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6633						 label_rtx (exp),
6634						 forced_labels);
6635	  }
6636
6637	temp = gen_rtx_MEM (FUNCTION_MODE,
6638			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6639	if (function != current_function_decl
6640	    && function != inline_function_decl && function != 0)
6641	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6642	return temp;
6643      }
6644
6645    case PARM_DECL:
6646      if (!DECL_RTL_SET_P (exp))
6647	{
6648	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6649	  return CONST0_RTX (mode);
6650	}
6651
6652      /* ... fall through ...  */
6653
6654    case VAR_DECL:
6655      /* If a static var's type was incomplete when the decl was written,
6656	 but the type is complete now, lay out the decl now.  */
6657      if (DECL_SIZE (exp) == 0
6658	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6659	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6660	layout_decl (exp, 0);
6661
6662      /* ... fall through ...  */
6663
6664    case FUNCTION_DECL:
6665    case RESULT_DECL:
6666      if (DECL_RTL (exp) == 0)
6667	abort ();
6668
6669      /* Ensure variable marked as used even if it doesn't go through
6670	 a parser.  If it hasn't be used yet, write out an external
6671	 definition.  */
6672      if (! TREE_USED (exp))
6673	{
6674	  assemble_external (exp);
6675	  TREE_USED (exp) = 1;
6676	}
6677
6678      /* Show we haven't gotten RTL for this yet.  */
6679      temp = 0;
6680
6681      /* Handle variables inherited from containing functions.  */
6682      context = decl_function_context (exp);
6683
6684      /* We treat inline_function_decl as an alias for the current function
6685	 because that is the inline function whose vars, types, etc.
6686	 are being merged into the current function.
6687	 See expand_inline_function.  */
6688
6689      if (context != 0 && context != current_function_decl
6690	  && context != inline_function_decl
6691	  /* If var is static, we don't need a static chain to access it.  */
6692	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6693		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6694	{
6695	  rtx addr;
6696
6697	  /* Mark as non-local and addressable.  */
6698	  DECL_NONLOCAL (exp) = 1;
6699	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6700	    abort ();
6701	  (*lang_hooks.mark_addressable) (exp);
6702	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6703	    abort ();
6704	  addr = XEXP (DECL_RTL (exp), 0);
6705	  if (GET_CODE (addr) == MEM)
6706	    addr
6707	      = replace_equiv_address (addr,
6708				       fix_lexical_addr (XEXP (addr, 0), exp));
6709	  else
6710	    addr = fix_lexical_addr (addr, exp);
6711
6712	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6713	}
6714
6715      /* This is the case of an array whose size is to be determined
6716	 from its initializer, while the initializer is still being parsed.
6717	 See expand_decl.  */
6718
6719      else if (GET_CODE (DECL_RTL (exp)) == MEM
6720	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6721	temp = validize_mem (DECL_RTL (exp));
6722
6723      /* If DECL_RTL is memory, we are in the normal case and either
6724	 the address is not valid or it is not a register and -fforce-addr
6725	 is specified, get the address into a register.  */
6726
6727      else if (GET_CODE (DECL_RTL (exp)) == MEM
6728	       && modifier != EXPAND_CONST_ADDRESS
6729	       && modifier != EXPAND_SUM
6730	       && modifier != EXPAND_INITIALIZER
6731	       && (! memory_address_p (DECL_MODE (exp),
6732				       XEXP (DECL_RTL (exp), 0))
6733		   || (flag_force_addr
6734		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6735	temp = replace_equiv_address (DECL_RTL (exp),
6736				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6737
6738      /* If we got something, return it.  But first, set the alignment
6739	 if the address is a register.  */
6740      if (temp != 0)
6741	{
6742	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6743	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6744
6745	  return temp;
6746	}
6747
6748      /* If the mode of DECL_RTL does not match that of the decl, it
6749	 must be a promoted value.  We return a SUBREG of the wanted mode,
6750	 but mark it so that we know that it was already extended.  */
6751
6752      if (GET_CODE (DECL_RTL (exp)) == REG
6753	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6754	{
6755	  /* Get the signedness used for this variable.  Ensure we get the
6756	     same mode we got when the variable was declared.  */
6757	  if (GET_MODE (DECL_RTL (exp))
6758	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6759			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6760	    abort ();
6761
6762	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6763	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6764	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6765	  return temp;
6766	}
6767
6768      return DECL_RTL (exp);
6769
6770    case INTEGER_CST:
6771      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6772				 TREE_INT_CST_HIGH (exp), mode);
6773
6774      /* ??? If overflow is set, fold will have done an incomplete job,
6775	 which can result in (plus xx (const_int 0)), which can get
6776	 simplified by validate_replace_rtx during virtual register
6777	 instantiation, which can result in unrecognizable insns.
6778	 Avoid this by forcing all overflows into registers.  */
6779      if (TREE_CONSTANT_OVERFLOW (exp)
6780	  && modifier != EXPAND_INITIALIZER)
6781	temp = force_reg (mode, temp);
6782
6783      return temp;
6784
6785    case CONST_DECL:
6786      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6787
6788    case REAL_CST:
6789      /* If optimized, generate immediate CONST_DOUBLE
6790	 which will be turned into memory by reload if necessary.
6791
6792	 We used to force a register so that loop.c could see it.  But
6793	 this does not allow gen_* patterns to perform optimizations with
6794	 the constants.  It also produces two insns in cases like "x = 1.0;".
6795	 On most machines, floating-point constants are not permitted in
6796	 many insns, so we'd end up copying it to a register in any case.
6797
6798	 Now, we do the copying in expand_binop, if appropriate.  */
6799      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6800					   TYPE_MODE (TREE_TYPE (exp)));
6801
6802    case COMPLEX_CST:
6803    case STRING_CST:
6804      if (! TREE_CST_RTL (exp))
6805	output_constant_def (exp, 1);
6806
6807      /* TREE_CST_RTL probably contains a constant address.
6808	 On RISC machines where a constant address isn't valid,
6809	 make some insns to get that address into a register.  */
6810      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6811	  && modifier != EXPAND_CONST_ADDRESS
6812	  && modifier != EXPAND_INITIALIZER
6813	  && modifier != EXPAND_SUM
6814	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6815	      || (flag_force_addr
6816		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6817	return replace_equiv_address (TREE_CST_RTL (exp),
6818				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6819      return TREE_CST_RTL (exp);
6820
6821    case EXPR_WITH_FILE_LOCATION:
6822      {
6823	rtx to_return;
6824	const char *saved_input_filename = input_filename;
6825	int saved_lineno = lineno;
6826	input_filename = EXPR_WFL_FILENAME (exp);
6827	lineno = EXPR_WFL_LINENO (exp);
6828	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6829	  emit_line_note (input_filename, lineno);
6830	/* Possibly avoid switching back and forth here.  */
6831	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6832	input_filename = saved_input_filename;
6833	lineno = saved_lineno;
6834	return to_return;
6835      }
6836
6837    case SAVE_EXPR:
6838      context = decl_function_context (exp);
6839
6840      /* If this SAVE_EXPR was at global context, assume we are an
6841	 initialization function and move it into our context.  */
6842      if (context == 0)
6843	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6844
6845      /* We treat inline_function_decl as an alias for the current function
6846	 because that is the inline function whose vars, types, etc.
6847	 are being merged into the current function.
6848	 See expand_inline_function.  */
6849      if (context == current_function_decl || context == inline_function_decl)
6850	context = 0;
6851
6852      /* If this is non-local, handle it.  */
6853      if (context)
6854	{
6855	  /* The following call just exists to abort if the context is
6856	     not of a containing function.  */
6857	  find_function_data (context);
6858
6859	  temp = SAVE_EXPR_RTL (exp);
6860	  if (temp && GET_CODE (temp) == REG)
6861	    {
6862	      put_var_into_stack (exp, /*rescan=*/true);
6863	      temp = SAVE_EXPR_RTL (exp);
6864	    }
6865	  if (temp == 0 || GET_CODE (temp) != MEM)
6866	    abort ();
6867	  return
6868	    replace_equiv_address (temp,
6869				   fix_lexical_addr (XEXP (temp, 0), exp));
6870	}
6871      if (SAVE_EXPR_RTL (exp) == 0)
6872	{
6873	  if (mode == VOIDmode)
6874	    temp = const0_rtx;
6875	  else
6876	    temp = assign_temp (build_qualified_type (type,
6877						      (TYPE_QUALS (type)
6878						       | TYPE_QUAL_CONST)),
6879				3, 0, 0);
6880
6881	  SAVE_EXPR_RTL (exp) = temp;
6882	  if (!optimize && GET_CODE (temp) == REG)
6883	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6884						save_expr_regs);
6885
6886	  /* If the mode of TEMP does not match that of the expression, it
6887	     must be a promoted value.  We pass store_expr a SUBREG of the
6888	     wanted mode but mark it so that we know that it was already
6889	     extended.  */
6890
6891	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6892	    {
6893	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6894	      promote_mode (type, mode, &unsignedp, 0);
6895	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6896	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6897	    }
6898
6899	  if (temp == const0_rtx)
6900	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6901	  else
6902	    store_expr (TREE_OPERAND (exp, 0), temp,
6903			modifier == EXPAND_STACK_PARM ? 2 : 0);
6904
6905	  TREE_USED (exp) = 1;
6906	}
6907
6908      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6909	 must be a promoted value.  We return a SUBREG of the wanted mode,
6910	 but mark it so that we know that it was already extended.  */
6911
6912      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6913	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6914	{
6915	  /* Compute the signedness and make the proper SUBREG.  */
6916	  promote_mode (type, mode, &unsignedp, 0);
6917	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6918	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6919	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6920	  return temp;
6921	}
6922
6923      return SAVE_EXPR_RTL (exp);
6924
6925    case UNSAVE_EXPR:
6926      {
6927	rtx temp;
6928	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6929	TREE_OPERAND (exp, 0)
6930	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6931	return temp;
6932      }
6933
6934    case PLACEHOLDER_EXPR:
6935      {
6936	tree old_list = placeholder_list;
6937	tree placeholder_expr = 0;
6938
6939	exp = find_placeholder (exp, &placeholder_expr);
6940	if (exp == 0)
6941	  abort ();
6942
6943	placeholder_list = TREE_CHAIN (placeholder_expr);
6944	temp = expand_expr (exp, original_target, tmode, modifier);
6945	placeholder_list = old_list;
6946	return temp;
6947      }
6948
6949    case WITH_RECORD_EXPR:
6950      /* Put the object on the placeholder list, expand our first operand,
6951	 and pop the list.  */
6952      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6953				    placeholder_list);
6954      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6955			    modifier);
6956      placeholder_list = TREE_CHAIN (placeholder_list);
6957      return target;
6958
6959    case GOTO_EXPR:
6960      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6961	expand_goto (TREE_OPERAND (exp, 0));
6962      else
6963	expand_computed_goto (TREE_OPERAND (exp, 0));
6964      return const0_rtx;
6965
6966    case EXIT_EXPR:
6967      expand_exit_loop_if_false (NULL,
6968				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6969      return const0_rtx;
6970
6971    case LABELED_BLOCK_EXPR:
6972      if (LABELED_BLOCK_BODY (exp))
6973	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6974      /* Should perhaps use expand_label, but this is simpler and safer.  */
6975      do_pending_stack_adjust ();
6976      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6977      return const0_rtx;
6978
6979    case EXIT_BLOCK_EXPR:
6980      if (EXIT_BLOCK_RETURN (exp))
6981	sorry ("returned value in block_exit_expr");
6982      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6983      return const0_rtx;
6984
6985    case LOOP_EXPR:
6986      push_temp_slots ();
6987      expand_start_loop (1);
6988      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6989      expand_end_loop ();
6990      pop_temp_slots ();
6991
6992      return const0_rtx;
6993
6994    case BIND_EXPR:
6995      {
6996	tree vars = TREE_OPERAND (exp, 0);
6997	int vars_need_expansion = 0;
6998
6999	/* Need to open a binding contour here because
7000	   if there are any cleanups they must be contained here.  */
7001	expand_start_bindings (2);
7002
7003	/* Mark the corresponding BLOCK for output in its proper place.  */
7004	if (TREE_OPERAND (exp, 2) != 0
7005	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
7006	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7007
7008	/* If VARS have not yet been expanded, expand them now.  */
7009	while (vars)
7010	  {
7011	    if (!DECL_RTL_SET_P (vars))
7012	      {
7013		vars_need_expansion = 1;
7014		expand_decl (vars);
7015	      }
7016	    expand_decl_init (vars);
7017	    vars = TREE_CHAIN (vars);
7018	  }
7019
7020	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7021
7022	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7023
7024	return temp;
7025      }
7026
7027    case RTL_EXPR:
7028      if (RTL_EXPR_SEQUENCE (exp))
7029	{
7030	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7031	    abort ();
7032	  emit_insn (RTL_EXPR_SEQUENCE (exp));
7033	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7034	}
7035      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7036      free_temps_for_rtl_expr (exp);
7037      return RTL_EXPR_RTL (exp);
7038
7039    case CONSTRUCTOR:
7040      /* If we don't need the result, just ensure we evaluate any
7041	 subexpressions.  */
7042      if (ignore)
7043	{
7044	  tree elt;
7045
7046	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7047	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7048
7049	  return const0_rtx;
7050	}
7051
7052      /* All elts simple constants => refer to a constant in memory.  But
7053	 if this is a non-BLKmode mode, let it store a field at a time
7054	 since that should make a CONST_INT or CONST_DOUBLE when we
7055	 fold.  Likewise, if we have a target we can use, it is best to
7056	 store directly into the target unless the type is large enough
7057	 that memcpy will be used.  If we are making an initializer and
7058	 all operands are constant, put it in memory as well.
7059
7060	FIXME: Avoid trying to fill vector constructors piece-meal.
7061	Output them with output_constant_def below unless we're sure
7062	they're zeros.  This should go away when vector initializers
7063	are treated like VECTOR_CST instead of arrays.
7064      */
7065      else if ((TREE_STATIC (exp)
7066		&& ((mode == BLKmode
7067		     && ! (target != 0 && safe_from_p (target, exp, 1)))
7068		    || TREE_ADDRESSABLE (exp)
7069		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7070			&& (! MOVE_BY_PIECES_P
7071			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7072			     TYPE_ALIGN (type)))
7073			&& ((TREE_CODE (type) == VECTOR_TYPE
7074			     && !is_zeros_p (exp))
7075			    || ! mostly_zeros_p (exp)))))
7076	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7077	{
7078	  rtx constructor = output_constant_def (exp, 1);
7079
7080	  if (modifier != EXPAND_CONST_ADDRESS
7081	      && modifier != EXPAND_INITIALIZER
7082	      && modifier != EXPAND_SUM)
7083	    constructor = validize_mem (constructor);
7084
7085	  return constructor;
7086	}
7087      else
7088	{
7089	  /* Handle calls that pass values in multiple non-contiguous
7090	     locations.  The Irix 6 ABI has examples of this.  */
7091	  if (target == 0 || ! safe_from_p (target, exp, 1)
7092	      || GET_CODE (target) == PARALLEL
7093	      || modifier == EXPAND_STACK_PARM)
7094	    target
7095	      = assign_temp (build_qualified_type (type,
7096						   (TYPE_QUALS (type)
7097						    | (TREE_READONLY (exp)
7098						       * TYPE_QUAL_CONST))),
7099			     0, TREE_ADDRESSABLE (exp), 1);
7100
7101	  store_constructor (exp, target, 0, int_expr_size (exp));
7102	  return target;
7103	}
7104
7105    case INDIRECT_REF:
7106      {
7107	tree exp1 = TREE_OPERAND (exp, 0);
7108	tree index;
7109	tree string = string_constant (exp1, &index);
7110
7111	/* Try to optimize reads from const strings.  */
7112	if (string
7113	    && TREE_CODE (string) == STRING_CST
7114	    && TREE_CODE (index) == INTEGER_CST
7115	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7116	    && GET_MODE_CLASS (mode) == MODE_INT
7117	    && GET_MODE_SIZE (mode) == 1
7118	    && modifier != EXPAND_WRITE)
7119	  return gen_int_mode (TREE_STRING_POINTER (string)
7120			       [TREE_INT_CST_LOW (index)], mode);
7121
7122	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7123	op0 = memory_address (mode, op0);
7124	temp = gen_rtx_MEM (mode, op0);
7125	set_mem_attributes (temp, exp, 0);
7126
7127	/* If we are writing to this object and its type is a record with
7128	   readonly fields, we must mark it as readonly so it will
7129	   conflict with readonly references to those fields.  */
7130	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7131	  RTX_UNCHANGING_P (temp) = 1;
7132
7133	return temp;
7134      }
7135
7136    case ARRAY_REF:
7137      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7138	abort ();
7139
7140      {
7141	tree array = TREE_OPERAND (exp, 0);
7142	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7143	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7144	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7145	HOST_WIDE_INT i;
7146
7147	/* Optimize the special-case of a zero lower bound.
7148
7149	   We convert the low_bound to sizetype to avoid some problems
7150	   with constant folding.  (E.g. suppose the lower bound is 1,
7151	   and its mode is QI.  Without the conversion,  (ARRAY
7152	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7153	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7154
7155	if (! integer_zerop (low_bound))
7156	  index = size_diffop (index, convert (sizetype, low_bound));
7157
7158	/* Fold an expression like: "foo"[2].
7159	   This is not done in fold so it won't happen inside &.
7160	   Don't fold if this is for wide characters since it's too
7161	   difficult to do correctly and this is a very rare case.  */
7162
7163	if (modifier != EXPAND_CONST_ADDRESS
7164	    && modifier != EXPAND_INITIALIZER
7165	    && modifier != EXPAND_MEMORY
7166	    && TREE_CODE (array) == STRING_CST
7167	    && TREE_CODE (index) == INTEGER_CST
7168	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7169	    && GET_MODE_CLASS (mode) == MODE_INT
7170	    && GET_MODE_SIZE (mode) == 1)
7171	  return gen_int_mode (TREE_STRING_POINTER (array)
7172			       [TREE_INT_CST_LOW (index)], mode);
7173
7174	/* If this is a constant index into a constant array,
7175	   just get the value from the array.  Handle both the cases when
7176	   we have an explicit constructor and when our operand is a variable
7177	   that was declared const.  */
7178
7179	if (modifier != EXPAND_CONST_ADDRESS
7180	    && modifier != EXPAND_INITIALIZER
7181	    && modifier != EXPAND_MEMORY
7182	    && TREE_CODE (array) == CONSTRUCTOR
7183	    && ! TREE_SIDE_EFFECTS (array)
7184	    && TREE_CODE (index) == INTEGER_CST
7185	    && 0 > compare_tree_int (index,
7186				     list_length (CONSTRUCTOR_ELTS
7187						  (TREE_OPERAND (exp, 0)))))
7188	  {
7189	    tree elem;
7190
7191	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7192		 i = TREE_INT_CST_LOW (index);
7193		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7194	      ;
7195
7196	    if (elem)
7197	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7198				  modifier);
7199	  }
7200
7201	else if (optimize >= 1
7202		 && modifier != EXPAND_CONST_ADDRESS
7203		 && modifier != EXPAND_INITIALIZER
7204		 && modifier != EXPAND_MEMORY
7205		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7206		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7207		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7208	  {
7209	    if (TREE_CODE (index) == INTEGER_CST)
7210	      {
7211		tree init = DECL_INITIAL (array);
7212
7213		if (TREE_CODE (init) == CONSTRUCTOR)
7214		  {
7215		    tree elem;
7216
7217		    for (elem = CONSTRUCTOR_ELTS (init);
7218			 (elem
7219			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7220			 elem = TREE_CHAIN (elem))
7221		      ;
7222
7223		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7224		      return expand_expr (fold (TREE_VALUE (elem)), target,
7225					  tmode, modifier);
7226		  }
7227		else if (TREE_CODE (init) == STRING_CST
7228			 && 0 > compare_tree_int (index,
7229						  TREE_STRING_LENGTH (init)))
7230		  {
7231		    tree type = TREE_TYPE (TREE_TYPE (init));
7232		    enum machine_mode mode = TYPE_MODE (type);
7233
7234		    if (GET_MODE_CLASS (mode) == MODE_INT
7235			&& GET_MODE_SIZE (mode) == 1)
7236		      return gen_int_mode (TREE_STRING_POINTER (init)
7237					   [TREE_INT_CST_LOW (index)], mode);
7238		  }
7239	      }
7240	  }
7241      }
7242      /* Fall through.  */
7243
7244    case COMPONENT_REF:
7245    case BIT_FIELD_REF:
7246    case ARRAY_RANGE_REF:
7247      /* If the operand is a CONSTRUCTOR, we can just extract the
7248	 appropriate field if it is present.  Don't do this if we have
7249	 already written the data since we want to refer to that copy
7250	 and varasm.c assumes that's what we'll do.  */
7251      if (code == COMPONENT_REF
7252	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7253	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7254	{
7255	  tree elt;
7256
7257	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7258	       elt = TREE_CHAIN (elt))
7259	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7260		/* We can normally use the value of the field in the
7261		   CONSTRUCTOR.  However, if this is a bitfield in
7262		   an integral mode that we can fit in a HOST_WIDE_INT,
7263		   we must mask only the number of bits in the bitfield,
7264		   since this is done implicitly by the constructor.  If
7265		   the bitfield does not meet either of those conditions,
7266		   we can't do this optimization.  */
7267		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7268		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7269			 == MODE_INT)
7270			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7271			    <= HOST_BITS_PER_WIDE_INT))))
7272	      {
7273		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7274		    && modifier == EXPAND_STACK_PARM)
7275		  target = 0;
7276		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7277		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7278		  {
7279		    HOST_WIDE_INT bitsize
7280		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7281		    enum machine_mode imode
7282		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7283
7284		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7285		      {
7286			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7287			op0 = expand_and (imode, op0, op1, target);
7288		      }
7289		    else
7290		      {
7291			tree count
7292			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7293					 0);
7294
7295			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7296					    target, 0);
7297			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7298					    target, 0);
7299		      }
7300		  }
7301
7302		return op0;
7303	      }
7304	}
7305
7306      {
7307	enum machine_mode mode1;
7308	HOST_WIDE_INT bitsize, bitpos;
7309	tree offset;
7310	int volatilep = 0;
7311	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7312					&mode1, &unsignedp, &volatilep);
7313	rtx orig_op0;
7314
7315	/* If we got back the original object, something is wrong.  Perhaps
7316	   we are evaluating an expression too early.  In any event, don't
7317	   infinitely recurse.  */
7318	if (tem == exp)
7319	  abort ();
7320
7321	/* If TEM's type is a union of variable size, pass TARGET to the inner
7322	   computation, since it will need a temporary and TARGET is known
7323	   to have to do.  This occurs in unchecked conversion in Ada.  */
7324
7325	orig_op0 = op0
7326	  = expand_expr (tem,
7327			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7328			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7329			      != INTEGER_CST)
7330			  && modifier != EXPAND_STACK_PARM
7331			  ? target : NULL_RTX),
7332			 VOIDmode,
7333			 (modifier == EXPAND_INITIALIZER
7334			  || modifier == EXPAND_CONST_ADDRESS
7335			  || modifier == EXPAND_STACK_PARM)
7336			 ? modifier : EXPAND_NORMAL);
7337
7338	/* If this is a constant, put it into a register if it is a
7339	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
7340	if (CONSTANT_P (op0))
7341	  {
7342	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7343	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7344		&& offset == 0)
7345	      op0 = force_reg (mode, op0);
7346	    else
7347	      op0 = validize_mem (force_const_mem (mode, op0));
7348	  }
7349
7350	if (offset != 0)
7351	  {
7352	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7353					  EXPAND_SUM);
7354
7355	    /* If this object is in a register, put it into memory.
7356	       This case can't occur in C, but can in Ada if we have
7357	       unchecked conversion of an expression from a scalar type to
7358	       an array or record type.  */
7359	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7360		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7361	      {
7362		/* If the operand is a SAVE_EXPR, we can deal with this by
7363		   forcing the SAVE_EXPR into memory.  */
7364		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7365		  {
7366		    put_var_into_stack (TREE_OPERAND (exp, 0),
7367					/*rescan=*/true);
7368		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7369		  }
7370		else
7371		  {
7372		    tree nt
7373		      = build_qualified_type (TREE_TYPE (tem),
7374					      (TYPE_QUALS (TREE_TYPE (tem))
7375					       | TYPE_QUAL_CONST));
7376		    rtx memloc = assign_temp (nt, 1, 1, 1);
7377
7378		    emit_move_insn (memloc, op0);
7379		    op0 = memloc;
7380		  }
7381	      }
7382
7383	    if (GET_CODE (op0) != MEM)
7384	      abort ();
7385
7386#ifdef POINTERS_EXTEND_UNSIGNED
7387	    if (GET_MODE (offset_rtx) != Pmode)
7388	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7389#else
7390	    if (GET_MODE (offset_rtx) != ptr_mode)
7391	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7392#endif
7393
7394	    /* A constant address in OP0 can have VOIDmode, we must not try
7395	       to call force_reg for that case.  Avoid that case.  */
7396	    if (GET_CODE (op0) == MEM
7397		&& GET_MODE (op0) == BLKmode
7398		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
7399		&& bitsize != 0
7400		&& (bitpos % bitsize) == 0
7401		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7402		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7403	      {
7404		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7405		bitpos = 0;
7406	      }
7407
7408	    op0 = offset_address (op0, offset_rtx,
7409				  highest_pow2_factor (offset));
7410	  }
7411
7412	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7413	   record its alignment as BIGGEST_ALIGNMENT.  */
7414	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7415	    && is_aligning_offset (offset, tem))
7416	  set_mem_align (op0, BIGGEST_ALIGNMENT);
7417
7418	/* Don't forget about volatility even if this is a bitfield.  */
7419	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7420	  {
7421	    if (op0 == orig_op0)
7422	      op0 = copy_rtx (op0);
7423
7424	    MEM_VOLATILE_P (op0) = 1;
7425	  }
7426
7427	/* The following code doesn't handle CONCAT.
7428	   Assume only bitpos == 0 can be used for CONCAT, due to
7429	   one element arrays having the same mode as its element.  */
7430	if (GET_CODE (op0) == CONCAT)
7431	  {
7432	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7433	      abort ();
7434	    return op0;
7435	  }
7436
7437	/* In cases where an aligned union has an unaligned object
7438	   as a field, we might be extracting a BLKmode value from
7439	   an integer-mode (e.g., SImode) object.  Handle this case
7440	   by doing the extract into an object as wide as the field
7441	   (which we know to be the width of a basic mode), then
7442	   storing into memory, and changing the mode to BLKmode.  */
7443	if (mode1 == VOIDmode
7444	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7445	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7446		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7447		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7448		&& modifier != EXPAND_CONST_ADDRESS
7449		&& modifier != EXPAND_INITIALIZER)
7450	    /* If the field isn't aligned enough to fetch as a memref,
7451	       fetch it as a bit field.  */
7452	    || (mode1 != BLKmode
7453		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7454		&& ((TYPE_ALIGN (TREE_TYPE (tem))
7455		     < GET_MODE_ALIGNMENT (mode))
7456		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7457	    /* If the type and the field are a constant size and the
7458	       size of the type isn't the same size as the bitfield,
7459	       we must use bitfield operations.  */
7460	    || (bitsize >= 0
7461		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7462		    == INTEGER_CST)
7463		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7464					  bitsize)))
7465	  {
7466	    enum machine_mode ext_mode = mode;
7467
7468	    if (ext_mode == BLKmode
7469		&& ! (target != 0 && GET_CODE (op0) == MEM
7470		      && GET_CODE (target) == MEM
7471		      && bitpos % BITS_PER_UNIT == 0))
7472	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7473
7474	    if (ext_mode == BLKmode)
7475	      {
7476		/* In this case, BITPOS must start at a byte boundary and
7477		   TARGET, if specified, must be a MEM.  */
7478		if (GET_CODE (op0) != MEM
7479		    || (target != 0 && GET_CODE (target) != MEM)
7480		    || bitpos % BITS_PER_UNIT != 0)
7481		  abort ();
7482
7483		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7484		if (target == 0)
7485		  target = assign_temp (type, 0, 1, 1);
7486
7487		emit_block_move (target, op0,
7488				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7489					  / BITS_PER_UNIT),
7490				 (modifier == EXPAND_STACK_PARM
7491				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7492
7493		return target;
7494	      }
7495
7496	    op0 = validize_mem (op0);
7497
7498	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7499	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7500
7501	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7502				     (modifier == EXPAND_STACK_PARM
7503				      ? NULL_RTX : target),
7504				     ext_mode, ext_mode,
7505				     int_size_in_bytes (TREE_TYPE (tem)));
7506
7507	    /* If the result is a record type and BITSIZE is narrower than
7508	       the mode of OP0, an integral mode, and this is a big endian
7509	       machine, we must put the field into the high-order bits.  */
7510	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7511		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7512		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7513	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7514				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7515					    - bitsize),
7516				  op0, 1);
7517
7518	    if (mode == BLKmode)
7519	      {
7520		rtx new = assign_temp (build_qualified_type
7521				       ((*lang_hooks.types.type_for_mode)
7522					(ext_mode, 0),
7523					TYPE_QUAL_CONST), 0, 1, 1);
7524
7525		emit_move_insn (new, op0);
7526		op0 = copy_rtx (new);
7527		PUT_MODE (op0, BLKmode);
7528		set_mem_attributes (op0, exp, 1);
7529	      }
7530
7531	    return op0;
7532	  }
7533
7534	/* If the result is BLKmode, use that to access the object
7535	   now as well.  */
7536	if (mode == BLKmode)
7537	  mode1 = BLKmode;
7538
7539	/* Get a reference to just this component.  */
7540	if (modifier == EXPAND_CONST_ADDRESS
7541	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7542	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7543	else
7544	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7545
7546	if (op0 == orig_op0)
7547	  op0 = copy_rtx (op0);
7548
7549	set_mem_attributes (op0, exp, 0);
7550	if (GET_CODE (XEXP (op0, 0)) == REG)
7551	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7552
7553	MEM_VOLATILE_P (op0) |= volatilep;
7554	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7555	    || modifier == EXPAND_CONST_ADDRESS
7556	    || modifier == EXPAND_INITIALIZER)
7557	  return op0;
7558	else if (target == 0)
7559	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7560
7561	convert_move (target, op0, unsignedp);
7562	return target;
7563      }
7564
7565    case VTABLE_REF:
7566      {
7567	rtx insn, before = get_last_insn (), vtbl_ref;
7568
7569	/* Evaluate the interior expression.  */
7570	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7571				 tmode, modifier);
7572
7573	/* Get or create an instruction off which to hang a note.  */
7574	if (REG_P (subtarget))
7575	  {
7576	    target = subtarget;
7577	    insn = get_last_insn ();
7578	    if (insn == before)
7579	      abort ();
7580	    if (! INSN_P (insn))
7581	      insn = prev_nonnote_insn (insn);
7582	  }
7583	else
7584	  {
7585	    target = gen_reg_rtx (GET_MODE (subtarget));
7586	    insn = emit_move_insn (target, subtarget);
7587	  }
7588
7589	/* Collect the data for the note.  */
7590	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7591	vtbl_ref = plus_constant (vtbl_ref,
7592				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7593	/* Discard the initial CONST that was added.  */
7594	vtbl_ref = XEXP (vtbl_ref, 0);
7595
7596	REG_NOTES (insn)
7597	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7598
7599	return target;
7600      }
7601
7602      /* Intended for a reference to a buffer of a file-object in Pascal.
7603	 But it's not certain that a special tree code will really be
7604	 necessary for these.  INDIRECT_REF might work for them.  */
7605    case BUFFER_REF:
7606      abort ();
7607
7608    case IN_EXPR:
7609      {
7610	/* Pascal set IN expression.
7611
7612	   Algorithm:
7613	       rlo       = set_low - (set_low%bits_per_word);
7614	       the_word  = set [ (index - rlo)/bits_per_word ];
7615	       bit_index = index % bits_per_word;
7616	       bitmask   = 1 << bit_index;
7617	       return !!(the_word & bitmask);  */
7618
7619	tree set = TREE_OPERAND (exp, 0);
7620	tree index = TREE_OPERAND (exp, 1);
7621	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7622	tree set_type = TREE_TYPE (set);
7623	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7624	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7625	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7626	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7627	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7628	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7629	rtx setaddr = XEXP (setval, 0);
7630	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7631	rtx rlow;
7632	rtx diff, quo, rem, addr, bit, result;
7633
7634	/* If domain is empty, answer is no.  Likewise if index is constant
7635	   and out of bounds.  */
7636	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7637	     && TREE_CODE (set_low_bound) == INTEGER_CST
7638	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7639	     || (TREE_CODE (index) == INTEGER_CST
7640		 && TREE_CODE (set_low_bound) == INTEGER_CST
7641		 && tree_int_cst_lt (index, set_low_bound))
7642	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7643		 && TREE_CODE (index) == INTEGER_CST
7644		 && tree_int_cst_lt (set_high_bound, index))))
7645	  return const0_rtx;
7646
7647	if (target == 0)
7648	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7649
7650	/* If we get here, we have to generate the code for both cases
7651	   (in range and out of range).  */
7652
7653	op0 = gen_label_rtx ();
7654	op1 = gen_label_rtx ();
7655
7656	if (! (GET_CODE (index_val) == CONST_INT
7657	       && GET_CODE (lo_r) == CONST_INT))
7658	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7659				   GET_MODE (index_val), iunsignedp, op1);
7660
7661	if (! (GET_CODE (index_val) == CONST_INT
7662	       && GET_CODE (hi_r) == CONST_INT))
7663	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7664				   GET_MODE (index_val), iunsignedp, op1);
7665
7666	/* Calculate the element number of bit zero in the first word
7667	   of the set.  */
7668	if (GET_CODE (lo_r) == CONST_INT)
7669	  rlow = GEN_INT (INTVAL (lo_r)
7670			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7671	else
7672	  rlow = expand_binop (index_mode, and_optab, lo_r,
7673			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7674			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7675
7676	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7677			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7678
7679	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7680			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7681	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7682			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7683
7684	addr = memory_address (byte_mode,
7685			       expand_binop (index_mode, add_optab, diff,
7686					     setaddr, NULL_RTX, iunsignedp,
7687					     OPTAB_LIB_WIDEN));
7688
7689	/* Extract the bit we want to examine.  */
7690	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7691			    gen_rtx_MEM (byte_mode, addr),
7692			    make_tree (TREE_TYPE (index), rem),
7693			    NULL_RTX, 1);
7694	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7695			       GET_MODE (target) == byte_mode ? target : 0,
7696			       1, OPTAB_LIB_WIDEN);
7697
7698	if (result != target)
7699	  convert_move (target, result, 1);
7700
7701	/* Output the code to handle the out-of-range case.  */
7702	emit_jump (op0);
7703	emit_label (op1);
7704	emit_move_insn (target, const0_rtx);
7705	emit_label (op0);
7706	return target;
7707      }
7708
7709    case WITH_CLEANUP_EXPR:
7710      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7711	{
7712	  WITH_CLEANUP_EXPR_RTL (exp)
7713	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7714	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7715				  CLEANUP_EH_ONLY (exp));
7716
7717	  /* That's it for this cleanup.  */
7718	  TREE_OPERAND (exp, 1) = 0;
7719	}
7720      return WITH_CLEANUP_EXPR_RTL (exp);
7721
7722    case CLEANUP_POINT_EXPR:
7723      {
7724	/* Start a new binding layer that will keep track of all cleanup
7725	   actions to be performed.  */
7726	expand_start_bindings (2);
7727
7728	target_temp_slot_level = temp_slot_level;
7729
7730	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7731	/* If we're going to use this value, load it up now.  */
7732	if (! ignore)
7733	  op0 = force_not_mem (op0);
7734	preserve_temp_slots (op0);
7735	expand_end_bindings (NULL_TREE, 0, 0);
7736      }
7737      return op0;
7738
7739    case CALL_EXPR:
7740      /* Check for a built-in function.  */
7741      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7742	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7743	      == FUNCTION_DECL)
7744	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7745	{
7746	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7747	      == BUILT_IN_FRONTEND)
7748	    return (*lang_hooks.expand_expr) (exp, original_target,
7749					      tmode, modifier);
7750	  else
7751	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7752	}
7753
7754      return expand_call (exp, target, ignore);
7755
7756    case NON_LVALUE_EXPR:
7757    case NOP_EXPR:
7758    case CONVERT_EXPR:
7759    case REFERENCE_EXPR:
7760      if (TREE_OPERAND (exp, 0) == error_mark_node)
7761	return const0_rtx;
7762
7763      if (TREE_CODE (type) == UNION_TYPE)
7764	{
7765	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7766
7767	  /* If both input and output are BLKmode, this conversion isn't doing
7768	     anything except possibly changing memory attribute.  */
7769	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7770	    {
7771	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7772					modifier);
7773
7774	      result = copy_rtx (result);
7775	      set_mem_attributes (result, exp, 0);
7776	      return result;
7777	    }
7778
7779	  if (target == 0)
7780	    target = assign_temp (type, 0, 1, 1);
7781
7782	  if (GET_CODE (target) == MEM)
7783	    /* Store data into beginning of memory target.  */
7784	    store_expr (TREE_OPERAND (exp, 0),
7785			adjust_address (target, TYPE_MODE (valtype), 0),
7786			modifier == EXPAND_STACK_PARM ? 2 : 0);
7787
7788	  else if (GET_CODE (target) == REG)
7789	    /* Store this field into a union of the proper type.  */
7790	    store_field (target,
7791			 MIN ((int_size_in_bytes (TREE_TYPE
7792						  (TREE_OPERAND (exp, 0)))
7793			       * BITS_PER_UNIT),
7794			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7795			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7796			 VOIDmode, 0, type, 0);
7797	  else
7798	    abort ();
7799
7800	  /* Return the entire union.  */
7801	  return target;
7802	}
7803
7804      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7805	{
7806	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7807			     modifier);
7808
7809	  /* If the signedness of the conversion differs and OP0 is
7810	     a promoted SUBREG, clear that indication since we now
7811	     have to do the proper extension.  */
7812	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7813	      && GET_CODE (op0) == SUBREG)
7814	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7815
7816	  return op0;
7817	}
7818
7819      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7820      if (GET_MODE (op0) == mode)
7821	return op0;
7822
7823      /* If OP0 is a constant, just convert it into the proper mode.  */
7824      if (CONSTANT_P (op0))
7825	{
7826	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7827	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7828
7829	  if (modifier == EXPAND_INITIALIZER)
7830	    return simplify_gen_subreg (mode, op0, inner_mode,
7831					subreg_lowpart_offset (mode,
7832							       inner_mode));
7833	  else
7834	    return convert_modes (mode, inner_mode, op0,
7835				  TREE_UNSIGNED (inner_type));
7836	}
7837
7838      if (modifier == EXPAND_INITIALIZER)
7839	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7840
7841      if (target == 0)
7842	return
7843	  convert_to_mode (mode, op0,
7844			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7845      else
7846	convert_move (target, op0,
7847		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7848      return target;
7849
7850    case VIEW_CONVERT_EXPR:
7851      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7852
7853      /* If the input and output modes are both the same, we are done.
7854	 Otherwise, if neither mode is BLKmode and both are within a word, we
7855	 can use gen_lowpart.  If neither is true, make sure the operand is
7856	 in memory and convert the MEM to the new mode.  */
7857      if (TYPE_MODE (type) == GET_MODE (op0))
7858	;
7859      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7860	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7861	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7862	op0 = gen_lowpart (TYPE_MODE (type), op0);
7863      else if (GET_CODE (op0) != MEM)
7864	{
7865	  /* If the operand is not a MEM, force it into memory.  Since we
7866	     are going to be be changing the mode of the MEM, don't call
7867	     force_const_mem for constants because we don't allow pool
7868	     constants to change mode.  */
7869	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7870
7871	  if (TREE_ADDRESSABLE (exp))
7872	    abort ();
7873
7874	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7875	    target
7876	      = assign_stack_temp_for_type
7877		(TYPE_MODE (inner_type),
7878		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7879
7880	  emit_move_insn (target, op0);
7881	  op0 = target;
7882	}
7883
7884      /* At this point, OP0 is in the correct mode.  If the output type is such
7885	 that the operand is known to be aligned, indicate that it is.
7886	 Otherwise, we need only be concerned about alignment for non-BLKmode
7887	 results.  */
7888      if (GET_CODE (op0) == MEM)
7889	{
7890	  op0 = copy_rtx (op0);
7891
7892	  if (TYPE_ALIGN_OK (type))
7893	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7894	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7895		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7896	    {
7897	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7898	      HOST_WIDE_INT temp_size
7899		= MAX (int_size_in_bytes (inner_type),
7900		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7901	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7902						    temp_size, 0, type);
7903	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7904
7905	      if (TREE_ADDRESSABLE (exp))
7906		abort ();
7907
7908	      if (GET_MODE (op0) == BLKmode)
7909		emit_block_move (new_with_op0_mode, op0,
7910				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7911				 (modifier == EXPAND_STACK_PARM
7912				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7913	      else
7914		emit_move_insn (new_with_op0_mode, op0);
7915
7916	      op0 = new;
7917	    }
7918
7919	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7920	}
7921
7922      return op0;
7923
7924    case PLUS_EXPR:
7925      this_optab = ! unsignedp && flag_trapv
7926                   && (GET_MODE_CLASS (mode) == MODE_INT)
7927                   ? addv_optab : add_optab;
7928
7929      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7930	 something else, make sure we add the register to the constant and
7931	 then to the other thing.  This case can occur during strength
7932	 reduction and doing it this way will produce better code if the
7933	 frame pointer or argument pointer is eliminated.
7934
7935	 fold-const.c will ensure that the constant is always in the inner
7936	 PLUS_EXPR, so the only case we need to do anything about is if
7937	 sp, ap, or fp is our second argument, in which case we must swap
7938	 the innermost first argument and our second argument.  */
7939
7940      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7941	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7942	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7943	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7944	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7945	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7946	{
7947	  tree t = TREE_OPERAND (exp, 1);
7948
7949	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7950	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7951	}
7952
7953      /* If the result is to be ptr_mode and we are adding an integer to
7954	 something, we might be forming a constant.  So try to use
7955	 plus_constant.  If it produces a sum and we can't accept it,
7956	 use force_operand.  This allows P = &ARR[const] to generate
7957	 efficient code on machines where a SYMBOL_REF is not a valid
7958	 address.
7959
7960	 If this is an EXPAND_SUM call, always return the sum.  */
7961      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7962	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7963	{
7964	  if (modifier == EXPAND_STACK_PARM)
7965	    target = 0;
7966	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7967	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7968	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7969	    {
7970	      rtx constant_part;
7971
7972	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7973				 EXPAND_SUM);
7974	      /* Use immed_double_const to ensure that the constant is
7975		 truncated according to the mode of OP1, then sign extended
7976		 to a HOST_WIDE_INT.  Using the constant directly can result
7977		 in non-canonical RTL in a 64x32 cross compile.  */
7978	      constant_part
7979		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7980				      (HOST_WIDE_INT) 0,
7981				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7982	      op1 = plus_constant (op1, INTVAL (constant_part));
7983	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7984		op1 = force_operand (op1, target);
7985	      return op1;
7986	    }
7987
7988	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7989		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7990		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7991	    {
7992	      rtx constant_part;
7993
7994	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7995				 (modifier == EXPAND_INITIALIZER
7996				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7997	      if (! CONSTANT_P (op0))
7998		{
7999		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8000				     VOIDmode, modifier);
8001		  /* Don't go to both_summands if modifier
8002		     says it's not right to return a PLUS.  */
8003		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8004		    goto binop2;
8005		  goto both_summands;
8006		}
8007	      /* Use immed_double_const to ensure that the constant is
8008		 truncated according to the mode of OP1, then sign extended
8009		 to a HOST_WIDE_INT.  Using the constant directly can result
8010		 in non-canonical RTL in a 64x32 cross compile.  */
8011	      constant_part
8012		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8013				      (HOST_WIDE_INT) 0,
8014				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8015	      op0 = plus_constant (op0, INTVAL (constant_part));
8016	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8017		op0 = force_operand (op0, target);
8018	      return op0;
8019	    }
8020	}
8021
8022      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8023	subtarget = 0;
8024
8025      /* No sense saving up arithmetic to be done
8026	 if it's all in the wrong mode to form part of an address.
8027	 And force_operand won't know whether to sign-extend or
8028	 zero-extend.  */
8029      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8030	  || mode != ptr_mode)
8031	{
8032	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8033	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8034	  if (op0 == const0_rtx)
8035	    return op1;
8036	  if (op1 == const0_rtx)
8037	    return op0;
8038	  goto binop2;
8039	}
8040
8041      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8042      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8043
8044      /* We come here from MINUS_EXPR when the second operand is a
8045         constant.  */
8046    both_summands:
8047      /* Make sure any term that's a sum with a constant comes last.  */
8048      if (GET_CODE (op0) == PLUS
8049	  && CONSTANT_P (XEXP (op0, 1)))
8050	{
8051	  temp = op0;
8052	  op0 = op1;
8053	  op1 = temp;
8054	}
8055      /* If adding to a sum including a constant,
8056	 associate it to put the constant outside.  */
8057      if (GET_CODE (op1) == PLUS
8058	  && CONSTANT_P (XEXP (op1, 1)))
8059	{
8060	  rtx constant_term = const0_rtx;
8061
8062	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8063	  if (temp != 0)
8064	    op0 = temp;
8065	  /* Ensure that MULT comes first if there is one.  */
8066	  else if (GET_CODE (op0) == MULT)
8067	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8068	  else
8069	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8070
8071	  /* Let's also eliminate constants from op0 if possible.  */
8072	  op0 = eliminate_constant_term (op0, &constant_term);
8073
8074	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8075	     their sum should be a constant.  Form it into OP1, since the
8076	     result we want will then be OP0 + OP1.  */
8077
8078	  temp = simplify_binary_operation (PLUS, mode, constant_term,
8079					    XEXP (op1, 1));
8080	  if (temp != 0)
8081	    op1 = temp;
8082	  else
8083	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8084	}
8085
8086      /* Put a constant term last and put a multiplication first.  */
8087      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8088	temp = op1, op1 = op0, op0 = temp;
8089
8090      temp = simplify_binary_operation (PLUS, mode, op0, op1);
8091      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8092
8093    case MINUS_EXPR:
8094      /* For initializers, we are allowed to return a MINUS of two
8095	 symbolic constants.  Here we handle all cases when both operands
8096	 are constant.  */
8097      /* Handle difference of two symbolic constants,
8098	 for the sake of an initializer.  */
8099      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8100	  && really_constant_p (TREE_OPERAND (exp, 0))
8101	  && really_constant_p (TREE_OPERAND (exp, 1)))
8102	{
8103	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8104				 modifier);
8105	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8106				 modifier);
8107
8108	  /* If the last operand is a CONST_INT, use plus_constant of
8109	     the negated constant.  Else make the MINUS.  */
8110	  if (GET_CODE (op1) == CONST_INT)
8111	    return plus_constant (op0, - INTVAL (op1));
8112	  else
8113	    return gen_rtx_MINUS (mode, op0, op1);
8114	}
8115
8116      this_optab = ! unsignedp && flag_trapv
8117                   && (GET_MODE_CLASS(mode) == MODE_INT)
8118                   ? subv_optab : sub_optab;
8119
8120      /* No sense saving up arithmetic to be done
8121	 if it's all in the wrong mode to form part of an address.
8122	 And force_operand won't know whether to sign-extend or
8123	 zero-extend.  */
8124      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8125	  || mode != ptr_mode)
8126	goto binop;
8127
8128      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8129	subtarget = 0;
8130
8131      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8132      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8133
8134      /* Convert A - const to A + (-const).  */
8135      if (GET_CODE (op1) == CONST_INT)
8136	{
8137	  op1 = negate_rtx (mode, op1);
8138	  goto both_summands;
8139	}
8140
8141      goto binop2;
8142
8143    case MULT_EXPR:
8144      /* If first operand is constant, swap them.
8145	 Thus the following special case checks need only
8146	 check the second operand.  */
8147      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8148	{
8149	  tree t1 = TREE_OPERAND (exp, 0);
8150	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8151	  TREE_OPERAND (exp, 1) = t1;
8152	}
8153
8154      /* Attempt to return something suitable for generating an
8155	 indexed address, for machines that support that.  */
8156
8157      if (modifier == EXPAND_SUM && mode == ptr_mode
8158	  && host_integerp (TREE_OPERAND (exp, 1), 0))
8159	{
8160	  tree exp1 = TREE_OPERAND (exp, 1);
8161
8162	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8163			     EXPAND_SUM);
8164
8165	  /* If we knew for certain that this is arithmetic for an array
8166	     reference, and we knew the bounds of the array, then we could
8167	     apply the distributive law across (PLUS X C) for constant C.
8168	     Without such knowledge, we risk overflowing the computation
8169	     when both X and C are large, but X+C isn't.  */
8170	  /* ??? Could perhaps special-case EXP being unsigned and C being
8171	     positive.  In that case we are certain that X+C is no smaller
8172	     than X and so the transformed expression will overflow iff the
8173	     original would have.  */
8174
8175	  if (GET_CODE (op0) != REG)
8176	    op0 = force_operand (op0, NULL_RTX);
8177	  if (GET_CODE (op0) != REG)
8178	    op0 = copy_to_mode_reg (mode, op0);
8179
8180	  return gen_rtx_MULT (mode, op0,
8181			       gen_int_mode (tree_low_cst (exp1, 0),
8182					     TYPE_MODE (TREE_TYPE (exp1))));
8183	}
8184
8185      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8186	subtarget = 0;
8187
8188      if (modifier == EXPAND_STACK_PARM)
8189	target = 0;
8190
8191      /* Check for multiplying things that have been extended
8192	 from a narrower type.  If this machine supports multiplying
8193	 in that narrower type with a result in the desired type,
8194	 do it that way, and avoid the explicit type-conversion.  */
8195      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8196	  && TREE_CODE (type) == INTEGER_TYPE
8197	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8198	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8199	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8200	       && int_fits_type_p (TREE_OPERAND (exp, 1),
8201				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8202	       /* Don't use a widening multiply if a shift will do.  */
8203	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8204		    > HOST_BITS_PER_WIDE_INT)
8205		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8206	      ||
8207	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8208	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8209		   ==
8210		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8211	       /* If both operands are extended, they must either both
8212		  be zero-extended or both be sign-extended.  */
8213	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8214		   ==
8215		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8216	{
8217	  enum machine_mode innermode
8218	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8219	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8220			? smul_widen_optab : umul_widen_optab);
8221	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8222			? umul_widen_optab : smul_widen_optab);
8223	  if (mode == GET_MODE_WIDER_MODE (innermode))
8224	    {
8225	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8226		{
8227		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8228				     NULL_RTX, VOIDmode, 0);
8229		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8230		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8231				       VOIDmode, 0);
8232		  else
8233		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8234				       NULL_RTX, VOIDmode, 0);
8235		  goto binop2;
8236		}
8237	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8238		       && innermode == word_mode)
8239		{
8240		  rtx htem;
8241		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8242				     NULL_RTX, VOIDmode, 0);
8243		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8244		    op1 = convert_modes (innermode, mode,
8245					 expand_expr (TREE_OPERAND (exp, 1),
8246						      NULL_RTX, VOIDmode, 0),
8247					 unsignedp);
8248		  else
8249		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8250				       NULL_RTX, VOIDmode, 0);
8251		  temp = expand_binop (mode, other_optab, op0, op1, target,
8252				       unsignedp, OPTAB_LIB_WIDEN);
8253		  htem = expand_mult_highpart_adjust (innermode,
8254						      gen_highpart (innermode, temp),
8255						      op0, op1,
8256						      gen_highpart (innermode, temp),
8257						      unsignedp);
8258		  emit_move_insn (gen_highpart (innermode, temp), htem);
8259		  return temp;
8260		}
8261	    }
8262	}
8263      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8264      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8265      return expand_mult (mode, op0, op1, target, unsignedp);
8266
8267    case TRUNC_DIV_EXPR:
8268    case FLOOR_DIV_EXPR:
8269    case CEIL_DIV_EXPR:
8270    case ROUND_DIV_EXPR:
8271    case EXACT_DIV_EXPR:
8272      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8273	subtarget = 0;
8274      if (modifier == EXPAND_STACK_PARM)
8275	target = 0;
8276      /* Possible optimization: compute the dividend with EXPAND_SUM
8277	 then if the divisor is constant can optimize the case
8278	 where some terms of the dividend have coeffs divisible by it.  */
8279      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8280      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8281      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8282
8283    case RDIV_EXPR:
8284      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
8285         expensive divide.  If not, combine will rebuild the original
8286         computation.  */
8287      if (flag_unsafe_math_optimizations && optimize && !optimize_size
8288	  && TREE_CODE (type) == REAL_TYPE
8289	  && !real_onep (TREE_OPERAND (exp, 0)))
8290        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8291				   build (RDIV_EXPR, type,
8292					  build_real (type, dconst1),
8293					  TREE_OPERAND (exp, 1))),
8294			    target, tmode, modifier);
8295      this_optab = sdiv_optab;
8296      goto binop;
8297
8298    case TRUNC_MOD_EXPR:
8299    case FLOOR_MOD_EXPR:
8300    case CEIL_MOD_EXPR:
8301    case ROUND_MOD_EXPR:
8302      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8303	subtarget = 0;
8304      if (modifier == EXPAND_STACK_PARM)
8305	target = 0;
8306      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8307      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8308      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8309
8310    case FIX_ROUND_EXPR:
8311    case FIX_FLOOR_EXPR:
8312    case FIX_CEIL_EXPR:
8313      abort ();			/* Not used for C.  */
8314
8315    case FIX_TRUNC_EXPR:
8316      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8317      if (target == 0 || modifier == EXPAND_STACK_PARM)
8318	target = gen_reg_rtx (mode);
8319      expand_fix (target, op0, unsignedp);
8320      return target;
8321
8322    case FLOAT_EXPR:
8323      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8324      if (target == 0 || modifier == EXPAND_STACK_PARM)
8325	target = gen_reg_rtx (mode);
8326      /* expand_float can't figure out what to do if FROM has VOIDmode.
8327	 So give it the correct mode.  With -O, cse will optimize this.  */
8328      if (GET_MODE (op0) == VOIDmode)
8329	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8330				op0);
8331      expand_float (target, op0,
8332		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8333      return target;
8334
8335    case NEGATE_EXPR:
8336      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8337      if (modifier == EXPAND_STACK_PARM)
8338	target = 0;
8339      temp = expand_unop (mode,
8340			  ! unsignedp && flag_trapv
8341			  && (GET_MODE_CLASS(mode) == MODE_INT)
8342			  ? negv_optab : neg_optab, op0, target, 0);
8343      if (temp == 0)
8344	abort ();
8345      return temp;
8346
8347    case ABS_EXPR:
8348      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8349      if (modifier == EXPAND_STACK_PARM)
8350	target = 0;
8351
8352      /* Handle complex values specially.  */
8353      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8354	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8355	return expand_complex_abs (mode, op0, target, unsignedp);
8356
8357      /* Unsigned abs is simply the operand.  Testing here means we don't
8358	 risk generating incorrect code below.  */
8359      if (TREE_UNSIGNED (type))
8360	return op0;
8361
8362      return expand_abs (mode, op0, target, unsignedp,
8363			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8364
8365    case MAX_EXPR:
8366    case MIN_EXPR:
8367      target = original_target;
8368      if (target == 0
8369	  || modifier == EXPAND_STACK_PARM
8370	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8371	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8372	  || GET_MODE (target) != mode
8373	  || (GET_CODE (target) == REG
8374	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8375	target = gen_reg_rtx (mode);
8376      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8377      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8378
8379      /* First try to do it with a special MIN or MAX instruction.
8380	 If that does not win, use a conditional jump to select the proper
8381	 value.  */
8382      this_optab = (TREE_UNSIGNED (type)
8383		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
8384		    : (code == MIN_EXPR ? smin_optab : smax_optab));
8385
8386      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8387			   OPTAB_WIDEN);
8388      if (temp != 0)
8389	return temp;
8390
8391      /* At this point, a MEM target is no longer useful; we will get better
8392	 code without it.  */
8393
8394      if (GET_CODE (target) == MEM)
8395	target = gen_reg_rtx (mode);
8396
8397      if (target != op0)
8398	emit_move_insn (target, op0);
8399
8400      op0 = gen_label_rtx ();
8401
8402      /* If this mode is an integer too wide to compare properly,
8403	 compare word by word.  Rely on cse to optimize constant cases.  */
8404      if (GET_MODE_CLASS (mode) == MODE_INT
8405	  && ! can_compare_p (GE, mode, ccp_jump))
8406	{
8407	  if (code == MAX_EXPR)
8408	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8409					  target, op1, NULL_RTX, op0);
8410	  else
8411	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8412					  op1, target, NULL_RTX, op0);
8413	}
8414      else
8415	{
8416	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8417	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8418				   unsignedp, mode, NULL_RTX, NULL_RTX,
8419				   op0);
8420	}
8421      emit_move_insn (target, op1);
8422      emit_label (op0);
8423      return target;
8424
8425    case BIT_NOT_EXPR:
8426      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8427      if (modifier == EXPAND_STACK_PARM)
8428	target = 0;
8429      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8430      if (temp == 0)
8431	abort ();
8432      return temp;
8433
8434    case FFS_EXPR:
8435      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8436      if (modifier == EXPAND_STACK_PARM)
8437	target = 0;
8438      temp = expand_unop (mode, ffs_optab, op0, target, 1);
8439      if (temp == 0)
8440	abort ();
8441      return temp;
8442
8443      /* ??? Can optimize bitwise operations with one arg constant.
8444	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8445	 and (a bitwise1 b) bitwise2 b (etc)
8446	 but that is probably not worth while.  */
8447
8448      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8449	 boolean values when we want in all cases to compute both of them.  In
8450	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8451	 as actual zero-or-1 values and then bitwise anding.  In cases where
8452	 there cannot be any side effects, better code would be made by
8453	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8454	 how to recognize those cases.  */
8455
8456    case TRUTH_AND_EXPR:
8457    case BIT_AND_EXPR:
8458      this_optab = and_optab;
8459      goto binop;
8460
8461    case TRUTH_OR_EXPR:
8462    case BIT_IOR_EXPR:
8463      this_optab = ior_optab;
8464      goto binop;
8465
8466    case TRUTH_XOR_EXPR:
8467    case BIT_XOR_EXPR:
8468      this_optab = xor_optab;
8469      goto binop;
8470
8471    case LSHIFT_EXPR:
8472    case RSHIFT_EXPR:
8473    case LROTATE_EXPR:
8474    case RROTATE_EXPR:
8475      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8476	subtarget = 0;
8477      if (modifier == EXPAND_STACK_PARM)
8478	target = 0;
8479      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8480      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8481			   unsignedp);
8482
8483      /* Could determine the answer when only additive constants differ.  Also,
8484	 the addition of one can be handled by changing the condition.  */
8485    case LT_EXPR:
8486    case LE_EXPR:
8487    case GT_EXPR:
8488    case GE_EXPR:
8489    case EQ_EXPR:
8490    case NE_EXPR:
8491    case UNORDERED_EXPR:
8492    case ORDERED_EXPR:
8493    case UNLT_EXPR:
8494    case UNLE_EXPR:
8495    case UNGT_EXPR:
8496    case UNGE_EXPR:
8497    case UNEQ_EXPR:
8498      temp = do_store_flag (exp,
8499			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8500			    tmode != VOIDmode ? tmode : mode, 0);
8501      if (temp != 0)
8502	return temp;
8503
8504      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8505      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8506	  && original_target
8507	  && GET_CODE (original_target) == REG
8508	  && (GET_MODE (original_target)
8509	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8510	{
8511	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8512			      VOIDmode, 0);
8513
8514	  /* If temp is constant, we can just compute the result.  */
8515	  if (GET_CODE (temp) == CONST_INT)
8516	    {
8517	      if (INTVAL (temp) != 0)
8518	        emit_move_insn (target, const1_rtx);
8519	      else
8520	        emit_move_insn (target, const0_rtx);
8521
8522	      return target;
8523	    }
8524
8525	  if (temp != original_target)
8526	    {
8527	      enum machine_mode mode1 = GET_MODE (temp);
8528	      if (mode1 == VOIDmode)
8529		mode1 = tmode != VOIDmode ? tmode : mode;
8530
8531	      temp = copy_to_mode_reg (mode1, temp);
8532	    }
8533
8534	  op1 = gen_label_rtx ();
8535	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8536				   GET_MODE (temp), unsignedp, op1);
8537	  emit_move_insn (temp, const1_rtx);
8538	  emit_label (op1);
8539	  return temp;
8540	}
8541
8542      /* If no set-flag instruction, must generate a conditional
8543	 store into a temporary variable.  Drop through
8544	 and handle this like && and ||.  */
8545
8546    case TRUTH_ANDIF_EXPR:
8547    case TRUTH_ORIF_EXPR:
8548      if (! ignore
8549	  && (target == 0
8550	      || modifier == EXPAND_STACK_PARM
8551	      || ! safe_from_p (target, exp, 1)
8552	      /* Make sure we don't have a hard reg (such as function's return
8553		 value) live across basic blocks, if not optimizing.  */
8554	      || (!optimize && GET_CODE (target) == REG
8555		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8556	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8557
8558      if (target)
8559	emit_clr_insn (target);
8560
8561      op1 = gen_label_rtx ();
8562      jumpifnot (exp, op1);
8563
8564      if (target)
8565	emit_0_to_1_insn (target);
8566
8567      emit_label (op1);
8568      return ignore ? const0_rtx : target;
8569
8570    case TRUTH_NOT_EXPR:
8571      if (modifier == EXPAND_STACK_PARM)
8572	target = 0;
8573      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8574      /* The parser is careful to generate TRUTH_NOT_EXPR
8575	 only with operands that are always zero or one.  */
8576      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8577			   target, 1, OPTAB_LIB_WIDEN);
8578      if (temp == 0)
8579	abort ();
8580      return temp;
8581
8582    case COMPOUND_EXPR:
8583      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8584      emit_queue ();
8585      return expand_expr (TREE_OPERAND (exp, 1),
8586			  (ignore ? const0_rtx : target),
8587			  VOIDmode, modifier);
8588
8589    case COND_EXPR:
8590      /* If we would have a "singleton" (see below) were it not for a
8591	 conversion in each arm, bring that conversion back out.  */
8592      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8593	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8594	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8595	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8596	{
8597	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8598	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8599
8600	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8601	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8602	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8603		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8604	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8605		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8606	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8607		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8608	    return expand_expr (build1 (NOP_EXPR, type,
8609					build (COND_EXPR, TREE_TYPE (iftrue),
8610					       TREE_OPERAND (exp, 0),
8611					       iftrue, iffalse)),
8612				target, tmode, modifier);
8613	}
8614
8615      {
8616	/* Note that COND_EXPRs whose type is a structure or union
8617	   are required to be constructed to contain assignments of
8618	   a temporary variable, so that we can evaluate them here
8619	   for side effect only.  If type is void, we must do likewise.  */
8620
8621	/* If an arm of the branch requires a cleanup,
8622	   only that cleanup is performed.  */
8623
8624	tree singleton = 0;
8625	tree binary_op = 0, unary_op = 0;
8626
8627	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8628	   convert it to our mode, if necessary.  */
8629	if (integer_onep (TREE_OPERAND (exp, 1))
8630	    && integer_zerop (TREE_OPERAND (exp, 2))
8631	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8632	  {
8633	    if (ignore)
8634	      {
8635		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8636			     modifier);
8637		return const0_rtx;
8638	      }
8639
8640	    if (modifier == EXPAND_STACK_PARM)
8641	      target = 0;
8642	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8643	    if (GET_MODE (op0) == mode)
8644	      return op0;
8645
8646	    if (target == 0)
8647	      target = gen_reg_rtx (mode);
8648	    convert_move (target, op0, unsignedp);
8649	    return target;
8650	  }
8651
8652	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8653	   output and conditionally add B.  Similarly for unary operations.
8654	   Don't do this if X has side-effects because those side effects
8655	   might affect A or B and the "?" operation is a sequence point in
8656	   ANSI.  (operand_equal_p tests for side effects.)  */
8657
8658	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8659	    && operand_equal_p (TREE_OPERAND (exp, 2),
8660				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8661	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8662	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8663		 && operand_equal_p (TREE_OPERAND (exp, 1),
8664				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8665	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8666	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8667		 && operand_equal_p (TREE_OPERAND (exp, 2),
8668				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8669	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8670	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8671		 && operand_equal_p (TREE_OPERAND (exp, 1),
8672				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8673	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8674
8675	/* If we are not to produce a result, we have no target.  Otherwise,
8676	   if a target was specified use it; it will not be used as an
8677	   intermediate target unless it is safe.  If no target, use a
8678	   temporary.  */
8679
8680	if (ignore)
8681	  temp = 0;
8682	else if (modifier == EXPAND_STACK_PARM)
8683	  temp = assign_temp (type, 0, 0, 1);
8684	else if (original_target
8685		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8686		     || (singleton && GET_CODE (original_target) == REG
8687			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8688			 && original_target == var_rtx (singleton)))
8689		 && GET_MODE (original_target) == mode
8690#ifdef HAVE_conditional_move
8691		 && (! can_conditionally_move_p (mode)
8692		     || GET_CODE (original_target) == REG
8693		     || TREE_ADDRESSABLE (type))
8694#endif
8695		 && (GET_CODE (original_target) != MEM
8696		     || TREE_ADDRESSABLE (type)))
8697	  temp = original_target;
8698	else if (TREE_ADDRESSABLE (type))
8699	  abort ();
8700	else
8701	  temp = assign_temp (type, 0, 0, 1);
8702
8703	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8704	   do the test of X as a store-flag operation, do this as
8705	   A + ((X != 0) << log C).  Similarly for other simple binary
8706	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
8707	if (temp && singleton && binary_op
8708	    && (TREE_CODE (binary_op) == PLUS_EXPR
8709		|| TREE_CODE (binary_op) == MINUS_EXPR
8710		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8711		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8712	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8713		: integer_onep (TREE_OPERAND (binary_op, 1)))
8714	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8715	  {
8716	    rtx result;
8717	    tree cond;
8718	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8719			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8720			       ? addv_optab : add_optab)
8721			    : TREE_CODE (binary_op) == MINUS_EXPR
8722			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8723			       ? subv_optab : sub_optab)
8724			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8725			    : xor_optab);
8726
8727	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
8728	    if (singleton == TREE_OPERAND (exp, 1))
8729	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8730	    else
8731	      cond = TREE_OPERAND (exp, 0);
8732
8733	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8734					   ? temp : NULL_RTX),
8735				    mode, BRANCH_COST <= 1);
8736
8737	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8738	      result = expand_shift (LSHIFT_EXPR, mode, result,
8739				     build_int_2 (tree_log2
8740						  (TREE_OPERAND
8741						   (binary_op, 1)),
8742						  0),
8743				     (safe_from_p (temp, singleton, 1)
8744				      ? temp : NULL_RTX), 0);
8745
8746	    if (result)
8747	      {
8748		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8749		return expand_binop (mode, boptab, op1, result, temp,
8750				     unsignedp, OPTAB_LIB_WIDEN);
8751	      }
8752	  }
8753
8754	do_pending_stack_adjust ();
8755	NO_DEFER_POP;
8756	op0 = gen_label_rtx ();
8757
8758	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8759	  {
8760	    if (temp != 0)
8761	      {
8762		/* If the target conflicts with the other operand of the
8763		   binary op, we can't use it.  Also, we can't use the target
8764		   if it is a hard register, because evaluating the condition
8765		   might clobber it.  */
8766		if ((binary_op
8767		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8768		    || (GET_CODE (temp) == REG
8769			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8770		  temp = gen_reg_rtx (mode);
8771		store_expr (singleton, temp,
8772			    modifier == EXPAND_STACK_PARM ? 2 : 0);
8773	      }
8774	    else
8775	      expand_expr (singleton,
8776			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8777	    if (singleton == TREE_OPERAND (exp, 1))
8778	      jumpif (TREE_OPERAND (exp, 0), op0);
8779	    else
8780	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8781
8782	    start_cleanup_deferral ();
8783	    if (binary_op && temp == 0)
8784	      /* Just touch the other operand.  */
8785	      expand_expr (TREE_OPERAND (binary_op, 1),
8786			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8787	    else if (binary_op)
8788	      store_expr (build (TREE_CODE (binary_op), type,
8789				 make_tree (type, temp),
8790				 TREE_OPERAND (binary_op, 1)),
8791			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8792	    else
8793	      store_expr (build1 (TREE_CODE (unary_op), type,
8794				  make_tree (type, temp)),
8795			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8796	    op1 = op0;
8797	  }
8798	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8799	   comparison operator.  If we have one of these cases, set the
8800	   output to A, branch on A (cse will merge these two references),
8801	   then set the output to FOO.  */
8802	else if (temp
8803		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8804		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8805		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8806				     TREE_OPERAND (exp, 1), 0)
8807		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8808		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8809		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8810	  {
8811	    if (GET_CODE (temp) == REG
8812		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8813	      temp = gen_reg_rtx (mode);
8814	    store_expr (TREE_OPERAND (exp, 1), temp,
8815			modifier == EXPAND_STACK_PARM ? 2 : 0);
8816	    jumpif (TREE_OPERAND (exp, 0), op0);
8817
8818	    start_cleanup_deferral ();
8819	    store_expr (TREE_OPERAND (exp, 2), temp,
8820			modifier == EXPAND_STACK_PARM ? 2 : 0);
8821	    op1 = op0;
8822	  }
8823	else if (temp
8824		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8825		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8826		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8827				     TREE_OPERAND (exp, 2), 0)
8828		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8829		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8830		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8831	  {
8832	    if (GET_CODE (temp) == REG
8833		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8834	      temp = gen_reg_rtx (mode);
8835	    store_expr (TREE_OPERAND (exp, 2), temp,
8836			modifier == EXPAND_STACK_PARM ? 2 : 0);
8837	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8838
8839	    start_cleanup_deferral ();
8840	    store_expr (TREE_OPERAND (exp, 1), temp,
8841			modifier == EXPAND_STACK_PARM ? 2 : 0);
8842	    op1 = op0;
8843	  }
8844	else
8845	  {
8846	    op1 = gen_label_rtx ();
8847	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8848
8849	    start_cleanup_deferral ();
8850
8851	    /* One branch of the cond can be void, if it never returns. For
8852	       example A ? throw : E  */
8853	    if (temp != 0
8854		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8855	      store_expr (TREE_OPERAND (exp, 1), temp,
8856			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8857	    else
8858	      expand_expr (TREE_OPERAND (exp, 1),
8859			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8860	    end_cleanup_deferral ();
8861	    emit_queue ();
8862	    emit_jump_insn (gen_jump (op1));
8863	    emit_barrier ();
8864	    emit_label (op0);
8865	    start_cleanup_deferral ();
8866	    if (temp != 0
8867		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8868	      store_expr (TREE_OPERAND (exp, 2), temp,
8869			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8870	    else
8871	      expand_expr (TREE_OPERAND (exp, 2),
8872			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8873	  }
8874
8875	end_cleanup_deferral ();
8876
8877	emit_queue ();
8878	emit_label (op1);
8879	OK_DEFER_POP;
8880
8881	return temp;
8882      }
8883
8884    case TARGET_EXPR:
8885      {
8886	/* Something needs to be initialized, but we didn't know
8887	   where that thing was when building the tree.  For example,
8888	   it could be the return value of a function, or a parameter
8889	   to a function which lays down in the stack, or a temporary
8890	   variable which must be passed by reference.
8891
8892	   We guarantee that the expression will either be constructed
8893	   or copied into our original target.  */
8894
8895	tree slot = TREE_OPERAND (exp, 0);
8896	tree cleanups = NULL_TREE;
8897	tree exp1;
8898
8899	if (TREE_CODE (slot) != VAR_DECL)
8900	  abort ();
8901
8902	if (! ignore)
8903	  target = original_target;
8904
8905	/* Set this here so that if we get a target that refers to a
8906	   register variable that's already been used, put_reg_into_stack
8907	   knows that it should fix up those uses.  */
8908	TREE_USED (slot) = 1;
8909
8910	if (target == 0)
8911	  {
8912	    if (DECL_RTL_SET_P (slot))
8913	      {
8914		target = DECL_RTL (slot);
8915		/* If we have already expanded the slot, so don't do
8916		   it again.  (mrs)  */
8917		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8918		  return target;
8919	      }
8920	    else
8921	      {
8922		target = assign_temp (type, 2, 0, 1);
8923		/* All temp slots at this level must not conflict.  */
8924		preserve_temp_slots (target);
8925		SET_DECL_RTL (slot, target);
8926		if (TREE_ADDRESSABLE (slot))
8927		  put_var_into_stack (slot, /*rescan=*/false);
8928
8929		/* Since SLOT is not known to the called function
8930		   to belong to its stack frame, we must build an explicit
8931		   cleanup.  This case occurs when we must build up a reference
8932		   to pass the reference as an argument.  In this case,
8933		   it is very likely that such a reference need not be
8934		   built here.  */
8935
8936		if (TREE_OPERAND (exp, 2) == 0)
8937		  TREE_OPERAND (exp, 2)
8938		    = (*lang_hooks.maybe_build_cleanup) (slot);
8939		cleanups = TREE_OPERAND (exp, 2);
8940	      }
8941	  }
8942	else
8943	  {
8944	    /* This case does occur, when expanding a parameter which
8945	       needs to be constructed on the stack.  The target
8946	       is the actual stack address that we want to initialize.
8947	       The function we call will perform the cleanup in this case.  */
8948
8949	    /* If we have already assigned it space, use that space,
8950	       not target that we were passed in, as our target
8951	       parameter is only a hint.  */
8952	    if (DECL_RTL_SET_P (slot))
8953	      {
8954		target = DECL_RTL (slot);
8955		/* If we have already expanded the slot, so don't do
8956                   it again.  (mrs)  */
8957		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8958		  return target;
8959	      }
8960	    else
8961	      {
8962		SET_DECL_RTL (slot, target);
8963		/* If we must have an addressable slot, then make sure that
8964		   the RTL that we just stored in slot is OK.  */
8965		if (TREE_ADDRESSABLE (slot))
8966		  put_var_into_stack (slot, /*rescan=*/true);
8967	      }
8968	  }
8969
8970	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8971	/* Mark it as expanded.  */
8972	TREE_OPERAND (exp, 1) = NULL_TREE;
8973
8974	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8975
8976	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8977
8978	return target;
8979      }
8980
8981    case INIT_EXPR:
8982      {
8983	tree lhs = TREE_OPERAND (exp, 0);
8984	tree rhs = TREE_OPERAND (exp, 1);
8985
8986	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8987	return temp;
8988      }
8989
8990    case MODIFY_EXPR:
8991      {
8992	/* If lhs is complex, expand calls in rhs before computing it.
8993	   That's so we don't compute a pointer and save it over a
8994	   call.  If lhs is simple, compute it first so we can give it
8995	   as a target if the rhs is just a call.  This avoids an
8996	   extra temp and copy and that prevents a partial-subsumption
8997	   which makes bad code.  Actually we could treat
8998	   component_ref's of vars like vars.  */
8999
9000	tree lhs = TREE_OPERAND (exp, 0);
9001	tree rhs = TREE_OPERAND (exp, 1);
9002
9003	temp = 0;
9004
9005	/* Check for |= or &= of a bitfield of size one into another bitfield
9006	   of size 1.  In this case, (unless we need the result of the
9007	   assignment) we can do this more efficiently with a
9008	   test followed by an assignment, if necessary.
9009
9010	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
9011	   things change so we do, this code should be enhanced to
9012	   support it.  */
9013	if (ignore
9014	    && TREE_CODE (lhs) == COMPONENT_REF
9015	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
9016		|| TREE_CODE (rhs) == BIT_AND_EXPR)
9017	    && TREE_OPERAND (rhs, 0) == lhs
9018	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9019	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9020	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9021	  {
9022	    rtx label = gen_label_rtx ();
9023
9024	    do_jump (TREE_OPERAND (rhs, 1),
9025		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9026		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9027	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
9028					     (TREE_CODE (rhs) == BIT_IOR_EXPR
9029					      ? integer_one_node
9030					      : integer_zero_node)),
9031			       0, 0);
9032	    do_pending_stack_adjust ();
9033	    emit_label (label);
9034	    return const0_rtx;
9035	  }
9036
9037	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9038
9039	return temp;
9040      }
9041
9042    case RETURN_EXPR:
9043      if (!TREE_OPERAND (exp, 0))
9044	expand_null_return ();
9045      else
9046	expand_return (TREE_OPERAND (exp, 0));
9047      return const0_rtx;
9048
9049    case PREINCREMENT_EXPR:
9050    case PREDECREMENT_EXPR:
9051      return expand_increment (exp, 0, ignore);
9052
9053    case POSTINCREMENT_EXPR:
9054    case POSTDECREMENT_EXPR:
9055      /* Faster to treat as pre-increment if result is not used.  */
9056      return expand_increment (exp, ! ignore, ignore);
9057
9058    case ADDR_EXPR:
9059      if (modifier == EXPAND_STACK_PARM)
9060	target = 0;
9061      /* Are we taking the address of a nested function?  */
9062      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9063	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9064	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9065	  && ! TREE_STATIC (exp))
9066	{
9067	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
9068	  op0 = force_operand (op0, target);
9069	}
9070      /* If we are taking the address of something erroneous, just
9071	 return a zero.  */
9072      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9073	return const0_rtx;
9074      /* If we are taking the address of a constant and are at the
9075	 top level, we have to use output_constant_def since we can't
9076	 call force_const_mem at top level.  */
9077      else if (cfun == 0
9078	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9079		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9080		       == 'c')))
9081	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9082      else
9083	{
9084	  /* We make sure to pass const0_rtx down if we came in with
9085	     ignore set, to avoid doing the cleanups twice for something.  */
9086	  op0 = expand_expr (TREE_OPERAND (exp, 0),
9087			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
9088			     (modifier == EXPAND_INITIALIZER
9089			      ? modifier : EXPAND_CONST_ADDRESS));
9090
9091	  /* If we are going to ignore the result, OP0 will have been set
9092	     to const0_rtx, so just return it.  Don't get confused and
9093	     think we are taking the address of the constant.  */
9094	  if (ignore)
9095	    return op0;
9096
9097	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9098	     clever and returns a REG when given a MEM.  */
9099	  op0 = protect_from_queue (op0, 1);
9100
9101	  /* We would like the object in memory.  If it is a constant, we can
9102	     have it be statically allocated into memory.  For a non-constant,
9103	     we need to allocate some memory and store the value into it.  */
9104
9105	  if (CONSTANT_P (op0))
9106	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9107				   op0);
9108	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9109		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9110		   || GET_CODE (op0) == PARALLEL)
9111	    {
9112	      /* If the operand is a SAVE_EXPR, we can deal with this by
9113		 forcing the SAVE_EXPR into memory.  */
9114	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9115		{
9116		  put_var_into_stack (TREE_OPERAND (exp, 0),
9117				      /*rescan=*/true);
9118		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9119		}
9120	      else
9121		{
9122		  /* If this object is in a register, it can't be BLKmode.  */
9123		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9124		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
9125
9126		  if (GET_CODE (op0) == PARALLEL)
9127		    /* Handle calls that pass values in multiple
9128		       non-contiguous locations.  The Irix 6 ABI has examples
9129		       of this.  */
9130		    emit_group_store (memloc, op0,
9131				      int_size_in_bytes (inner_type));
9132		  else
9133		    emit_move_insn (memloc, op0);
9134
9135		  op0 = memloc;
9136		}
9137	    }
9138
9139	  if (GET_CODE (op0) != MEM)
9140	    abort ();
9141
9142	  mark_temp_addr_taken (op0);
9143	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9144	    {
9145	      op0 = XEXP (op0, 0);
9146#ifdef POINTERS_EXTEND_UNSIGNED
9147	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9148		  && mode == ptr_mode)
9149		op0 = convert_memory_address (ptr_mode, op0);
9150#endif
9151	      return op0;
9152	    }
9153
9154	  /* If OP0 is not aligned as least as much as the type requires, we
9155	     need to make a temporary, copy OP0 to it, and take the address of
9156	     the temporary.  We want to use the alignment of the type, not of
9157	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
9158	     the test for BLKmode means that can't happen.  The test for
9159	     BLKmode is because we never make mis-aligned MEMs with
9160	     non-BLKmode.
9161
9162	     We don't need to do this at all if the machine doesn't have
9163	     strict alignment.  */
9164	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9165	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9166		  > MEM_ALIGN (op0))
9167	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9168	    {
9169	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9170	      rtx new;
9171
9172	      if (TYPE_ALIGN_OK (inner_type))
9173		abort ();
9174
9175	      if (TREE_ADDRESSABLE (inner_type))
9176		{
9177		  /* We can't make a bitwise copy of this object, so fail.  */
9178		  error ("cannot take the address of an unaligned member");
9179		  return const0_rtx;
9180		}
9181
9182	      new = assign_stack_temp_for_type
9183		(TYPE_MODE (inner_type),
9184		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9185		 : int_size_in_bytes (inner_type),
9186		 1, build_qualified_type (inner_type,
9187					  (TYPE_QUALS (inner_type)
9188					   | TYPE_QUAL_CONST)));
9189
9190	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9191			       (modifier == EXPAND_STACK_PARM
9192				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9193
9194	      op0 = new;
9195	    }
9196
9197	  op0 = force_operand (XEXP (op0, 0), target);
9198	}
9199
9200      if (flag_force_addr
9201	  && GET_CODE (op0) != REG
9202	  && modifier != EXPAND_CONST_ADDRESS
9203	  && modifier != EXPAND_INITIALIZER
9204	  && modifier != EXPAND_SUM)
9205	op0 = force_reg (Pmode, op0);
9206
9207      if (GET_CODE (op0) == REG
9208	  && ! REG_USERVAR_P (op0))
9209	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9210
9211#ifdef POINTERS_EXTEND_UNSIGNED
9212      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9213	  && mode == ptr_mode)
9214	op0 = convert_memory_address (ptr_mode, op0);
9215#endif
9216
9217      return op0;
9218
9219    case ENTRY_VALUE_EXPR:
9220      abort ();
9221
9222    /* COMPLEX type for Extended Pascal & Fortran  */
9223    case COMPLEX_EXPR:
9224      {
9225	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9226	rtx insns;
9227
9228	/* Get the rtx code of the operands.  */
9229	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9230	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9231
9232	if (! target)
9233	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9234
9235	start_sequence ();
9236
9237	/* Move the real (op0) and imaginary (op1) parts to their location.  */
9238	emit_move_insn (gen_realpart (mode, target), op0);
9239	emit_move_insn (gen_imagpart (mode, target), op1);
9240
9241	insns = get_insns ();
9242	end_sequence ();
9243
9244	/* Complex construction should appear as a single unit.  */
9245	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9246	   each with a separate pseudo as destination.
9247	   It's not correct for flow to treat them as a unit.  */
9248	if (GET_CODE (target) != CONCAT)
9249	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9250	else
9251	  emit_insn (insns);
9252
9253	return target;
9254      }
9255
9256    case REALPART_EXPR:
9257      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9258      return gen_realpart (mode, op0);
9259
9260    case IMAGPART_EXPR:
9261      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9262      return gen_imagpart (mode, op0);
9263
9264    case CONJ_EXPR:
9265      {
9266	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9267	rtx imag_t;
9268	rtx insns;
9269
9270	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9271
9272	if (! target)
9273	  target = gen_reg_rtx (mode);
9274
9275	start_sequence ();
9276
9277	/* Store the realpart and the negated imagpart to target.  */
9278	emit_move_insn (gen_realpart (partmode, target),
9279			gen_realpart (partmode, op0));
9280
9281	imag_t = gen_imagpart (partmode, target);
9282	temp = expand_unop (partmode,
9283			    ! unsignedp && flag_trapv
9284			    && (GET_MODE_CLASS(partmode) == MODE_INT)
9285			    ? negv_optab : neg_optab,
9286			    gen_imagpart (partmode, op0), imag_t, 0);
9287	if (temp != imag_t)
9288	  emit_move_insn (imag_t, temp);
9289
9290	insns = get_insns ();
9291	end_sequence ();
9292
9293	/* Conjugate should appear as a single unit
9294	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9295	   each with a separate pseudo as destination.
9296	   It's not correct for flow to treat them as a unit.  */
9297	if (GET_CODE (target) != CONCAT)
9298	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9299	else
9300	  emit_insn (insns);
9301
9302	return target;
9303      }
9304
9305    case TRY_CATCH_EXPR:
9306      {
9307	tree handler = TREE_OPERAND (exp, 1);
9308
9309	expand_eh_region_start ();
9310
9311	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9312
9313	expand_eh_region_end_cleanup (handler);
9314
9315	return op0;
9316      }
9317
9318    case TRY_FINALLY_EXPR:
9319      {
9320	tree try_block = TREE_OPERAND (exp, 0);
9321	tree finally_block = TREE_OPERAND (exp, 1);
9322
9323        if (!optimize || unsafe_for_reeval (finally_block) > 1)
9324	  {
9325	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9326	       is not sufficient, so we cannot expand the block twice.
9327	       So we play games with GOTO_SUBROUTINE_EXPR to let us
9328	       expand the thing only once.  */
9329	    /* When not optimizing, we go ahead with this form since
9330	       (1) user breakpoints operate more predictably without
9331		   code duplication, and
9332	       (2) we're not running any of the global optimizers
9333	           that would explode in time/space with the highly
9334		   connected CFG created by the indirect branching.  */
9335
9336	    rtx finally_label = gen_label_rtx ();
9337	    rtx done_label = gen_label_rtx ();
9338	    rtx return_link = gen_reg_rtx (Pmode);
9339	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9340			          (tree) finally_label, (tree) return_link);
9341	    TREE_SIDE_EFFECTS (cleanup) = 1;
9342
9343	    /* Start a new binding layer that will keep track of all cleanup
9344	       actions to be performed.  */
9345	    expand_start_bindings (2);
9346	    target_temp_slot_level = temp_slot_level;
9347
9348	    expand_decl_cleanup (NULL_TREE, cleanup);
9349	    op0 = expand_expr (try_block, target, tmode, modifier);
9350
9351	    preserve_temp_slots (op0);
9352	    expand_end_bindings (NULL_TREE, 0, 0);
9353	    emit_jump (done_label);
9354	    emit_label (finally_label);
9355	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9356	    emit_indirect_jump (return_link);
9357	    emit_label (done_label);
9358	  }
9359	else
9360	  {
9361	    expand_start_bindings (2);
9362	    target_temp_slot_level = temp_slot_level;
9363
9364	    expand_decl_cleanup (NULL_TREE, finally_block);
9365	    op0 = expand_expr (try_block, target, tmode, modifier);
9366
9367	    preserve_temp_slots (op0);
9368	    expand_end_bindings (NULL_TREE, 0, 0);
9369	  }
9370
9371	return op0;
9372      }
9373
9374    case GOTO_SUBROUTINE_EXPR:
9375      {
9376	rtx subr = (rtx) TREE_OPERAND (exp, 0);
9377	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9378	rtx return_address = gen_label_rtx ();
9379	emit_move_insn (return_link,
9380			gen_rtx_LABEL_REF (Pmode, return_address));
9381	emit_jump (subr);
9382	emit_label (return_address);
9383	return const0_rtx;
9384      }
9385
9386    case VA_ARG_EXPR:
9387      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9388
9389    case EXC_PTR_EXPR:
9390      return get_exception_pointer (cfun);
9391
9392    case FDESC_EXPR:
9393      /* Function descriptors are not valid except for as
9394	 initialization constants, and should not be expanded.  */
9395      abort ();
9396
9397    default:
9398      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9399    }
9400
9401  /* Here to do an ordinary binary operator, generating an instruction
9402     from the optab already placed in `this_optab'.  */
9403 binop:
9404  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9405    subtarget = 0;
9406  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9407  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9408 binop2:
9409  if (modifier == EXPAND_STACK_PARM)
9410    target = 0;
9411  temp = expand_binop (mode, this_optab, op0, op1, target,
9412		       unsignedp, OPTAB_LIB_WIDEN);
9413  if (temp == 0)
9414    abort ();
9415  return temp;
9416}
9417
9418/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9419   when applied to the address of EXP produces an address known to be
9420   aligned more than BIGGEST_ALIGNMENT.  */
9421
9422static int
9423is_aligning_offset (offset, exp)
9424     tree offset;
9425     tree exp;
9426{
9427  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
9428  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9429	 || TREE_CODE (offset) == NOP_EXPR
9430	 || TREE_CODE (offset) == CONVERT_EXPR
9431	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9432    offset = TREE_OPERAND (offset, 0);
9433
9434  /* We must now have a BIT_AND_EXPR with a constant that is one less than
9435     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9436  if (TREE_CODE (offset) != BIT_AND_EXPR
9437      || !host_integerp (TREE_OPERAND (offset, 1), 1)
9438      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9439      || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9440    return 0;
9441
9442  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9443     It must be NEGATE_EXPR.  Then strip any more conversions.  */
9444  offset = TREE_OPERAND (offset, 0);
9445  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9446	 || TREE_CODE (offset) == NOP_EXPR
9447	 || TREE_CODE (offset) == CONVERT_EXPR)
9448    offset = TREE_OPERAND (offset, 0);
9449
9450  if (TREE_CODE (offset) != NEGATE_EXPR)
9451    return 0;
9452
9453  offset = TREE_OPERAND (offset, 0);
9454  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9455	 || TREE_CODE (offset) == NOP_EXPR
9456	 || TREE_CODE (offset) == CONVERT_EXPR)
9457    offset = TREE_OPERAND (offset, 0);
9458
9459  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9460     whose type is the same as EXP.  */
9461  return (TREE_CODE (offset) == ADDR_EXPR
9462	  && (TREE_OPERAND (offset, 0) == exp
9463	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9464		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
9465		      == TREE_TYPE (exp)))));
9466}
9467
9468/* Return the tree node if an ARG corresponds to a string constant or zero
9469   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9470   in bytes within the string that ARG is accessing.  The type of the
9471   offset will be `sizetype'.  */
9472
9473tree
9474string_constant (arg, ptr_offset)
9475     tree arg;
9476     tree *ptr_offset;
9477{
9478  STRIP_NOPS (arg);
9479
9480  if (TREE_CODE (arg) == ADDR_EXPR
9481      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9482    {
9483      *ptr_offset = size_zero_node;
9484      return TREE_OPERAND (arg, 0);
9485    }
9486  else if (TREE_CODE (arg) == PLUS_EXPR)
9487    {
9488      tree arg0 = TREE_OPERAND (arg, 0);
9489      tree arg1 = TREE_OPERAND (arg, 1);
9490
9491      STRIP_NOPS (arg0);
9492      STRIP_NOPS (arg1);
9493
9494      if (TREE_CODE (arg0) == ADDR_EXPR
9495	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9496	{
9497	  *ptr_offset = convert (sizetype, arg1);
9498	  return TREE_OPERAND (arg0, 0);
9499	}
9500      else if (TREE_CODE (arg1) == ADDR_EXPR
9501	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9502	{
9503	  *ptr_offset = convert (sizetype, arg0);
9504	  return TREE_OPERAND (arg1, 0);
9505	}
9506    }
9507
9508  return 0;
9509}
9510
9511/* Expand code for a post- or pre- increment or decrement
9512   and return the RTX for the result.
9513   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
9514
9515static rtx
9516expand_increment (exp, post, ignore)
9517     tree exp;
9518     int post, ignore;
9519{
9520  rtx op0, op1;
9521  rtx temp, value;
9522  tree incremented = TREE_OPERAND (exp, 0);
9523  optab this_optab = add_optab;
9524  int icode;
9525  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9526  int op0_is_copy = 0;
9527  int single_insn = 0;
9528  /* 1 means we can't store into OP0 directly,
9529     because it is a subreg narrower than a word,
9530     and we don't dare clobber the rest of the word.  */
9531  int bad_subreg = 0;
9532
9533  /* Stabilize any component ref that might need to be
9534     evaluated more than once below.  */
9535  if (!post
9536      || TREE_CODE (incremented) == BIT_FIELD_REF
9537      || (TREE_CODE (incremented) == COMPONENT_REF
9538	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9539	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9540    incremented = stabilize_reference (incremented);
9541  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
9542     ones into save exprs so that they don't accidentally get evaluated
9543     more than once by the code below.  */
9544  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9545      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9546    incremented = save_expr (incremented);
9547
9548  /* Compute the operands as RTX.
9549     Note whether OP0 is the actual lvalue or a copy of it:
9550     I believe it is a copy iff it is a register or subreg
9551     and insns were generated in computing it.  */
9552
9553  temp = get_last_insn ();
9554  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9555
9556  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9557     in place but instead must do sign- or zero-extension during assignment,
9558     so we copy it into a new register and let the code below use it as
9559     a copy.
9560
9561     Note that we can safely modify this SUBREG since it is know not to be
9562     shared (it was made by the expand_expr call above).  */
9563
9564  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9565    {
9566      if (post)
9567	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9568      else
9569	bad_subreg = 1;
9570    }
9571  else if (GET_CODE (op0) == SUBREG
9572	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9573    {
9574      /* We cannot increment this SUBREG in place.  If we are
9575	 post-incrementing, get a copy of the old value.  Otherwise,
9576	 just mark that we cannot increment in place.  */
9577      if (post)
9578	op0 = copy_to_reg (op0);
9579      else
9580	bad_subreg = 1;
9581    }
9582
9583  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9584		 && temp != get_last_insn ());
9585  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9586
9587  /* Decide whether incrementing or decrementing.  */
9588  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9589      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9590    this_optab = sub_optab;
9591
9592  /* Convert decrement by a constant into a negative increment.  */
9593  if (this_optab == sub_optab
9594      && GET_CODE (op1) == CONST_INT)
9595    {
9596      op1 = GEN_INT (-INTVAL (op1));
9597      this_optab = add_optab;
9598    }
9599
9600  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9601    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9602
9603  /* For a preincrement, see if we can do this with a single instruction.  */
9604  if (!post)
9605    {
9606      icode = (int) this_optab->handlers[(int) mode].insn_code;
9607      if (icode != (int) CODE_FOR_nothing
9608	  /* Make sure that OP0 is valid for operands 0 and 1
9609	     of the insn we want to queue.  */
9610	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9611	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9612	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9613	single_insn = 1;
9614    }
9615
9616  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9617     then we cannot just increment OP0.  We must therefore contrive to
9618     increment the original value.  Then, for postincrement, we can return
9619     OP0 since it is a copy of the old value.  For preincrement, expand here
9620     unless we can do it with a single insn.
9621
9622     Likewise if storing directly into OP0 would clobber high bits
9623     we need to preserve (bad_subreg).  */
9624  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9625    {
9626      /* This is the easiest way to increment the value wherever it is.
9627	 Problems with multiple evaluation of INCREMENTED are prevented
9628	 because either (1) it is a component_ref or preincrement,
9629	 in which case it was stabilized above, or (2) it is an array_ref
9630	 with constant index in an array in a register, which is
9631	 safe to reevaluate.  */
9632      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9633			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9634			    ? MINUS_EXPR : PLUS_EXPR),
9635			   TREE_TYPE (exp),
9636			   incremented,
9637			   TREE_OPERAND (exp, 1));
9638
9639      while (TREE_CODE (incremented) == NOP_EXPR
9640	     || TREE_CODE (incremented) == CONVERT_EXPR)
9641	{
9642	  newexp = convert (TREE_TYPE (incremented), newexp);
9643	  incremented = TREE_OPERAND (incremented, 0);
9644	}
9645
9646      temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9647      return post ? op0 : temp;
9648    }
9649
9650  if (post)
9651    {
9652      /* We have a true reference to the value in OP0.
9653	 If there is an insn to add or subtract in this mode, queue it.
9654	 Queueing the increment insn avoids the register shuffling
9655	 that often results if we must increment now and first save
9656	 the old value for subsequent use.  */
9657
9658#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9659      op0 = stabilize (op0);
9660#endif
9661
9662      icode = (int) this_optab->handlers[(int) mode].insn_code;
9663      if (icode != (int) CODE_FOR_nothing
9664	  /* Make sure that OP0 is valid for operands 0 and 1
9665	     of the insn we want to queue.  */
9666	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9667	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9668	{
9669	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9670	    op1 = force_reg (mode, op1);
9671
9672	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9673	}
9674      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9675	{
9676	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9677		      ? force_reg (Pmode, XEXP (op0, 0))
9678		      : copy_to_reg (XEXP (op0, 0)));
9679	  rtx temp, result;
9680
9681	  op0 = replace_equiv_address (op0, addr);
9682	  temp = force_reg (GET_MODE (op0), op0);
9683	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9684	    op1 = force_reg (mode, op1);
9685
9686	  /* The increment queue is LIFO, thus we have to `queue'
9687	     the instructions in reverse order.  */
9688	  enqueue_insn (op0, gen_move_insn (op0, temp));
9689	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9690	  return result;
9691	}
9692    }
9693
9694  /* Preincrement, or we can't increment with one simple insn.  */
9695  if (post)
9696    /* Save a copy of the value before inc or dec, to return it later.  */
9697    temp = value = copy_to_reg (op0);
9698  else
9699    /* Arrange to return the incremented value.  */
9700    /* Copy the rtx because expand_binop will protect from the queue,
9701       and the results of that would be invalid for us to return
9702       if our caller does emit_queue before using our result.  */
9703    temp = copy_rtx (value = op0);
9704
9705  /* Increment however we can.  */
9706  op1 = expand_binop (mode, this_optab, value, op1, op0,
9707		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9708
9709  /* Make sure the value is stored into OP0.  */
9710  if (op1 != op0)
9711    emit_move_insn (op0, op1);
9712
9713  return temp;
9714}
9715
9716/* At the start of a function, record that we have no previously-pushed
9717   arguments waiting to be popped.  */
9718
9719void
9720init_pending_stack_adjust ()
9721{
9722  pending_stack_adjust = 0;
9723}
9724
9725/* When exiting from function, if safe, clear out any pending stack adjust
9726   so the adjustment won't get done.
9727
9728   Note, if the current function calls alloca, then it must have a
9729   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9730
9731void
9732clear_pending_stack_adjust ()
9733{
9734#ifdef EXIT_IGNORE_STACK
9735  if (optimize > 0
9736      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9737      && EXIT_IGNORE_STACK
9738      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9739      && ! flag_inline_functions)
9740    {
9741      stack_pointer_delta -= pending_stack_adjust,
9742      pending_stack_adjust = 0;
9743    }
9744#endif
9745}
9746
9747/* Pop any previously-pushed arguments that have not been popped yet.  */
9748
9749void
9750do_pending_stack_adjust ()
9751{
9752  if (inhibit_defer_pop == 0)
9753    {
9754      if (pending_stack_adjust != 0)
9755	adjust_stack (GEN_INT (pending_stack_adjust));
9756      pending_stack_adjust = 0;
9757    }
9758}
9759
9760/* Expand conditional expressions.  */
9761
9762/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9763   LABEL is an rtx of code CODE_LABEL, in this function and all the
9764   functions here.  */
9765
9766void
9767jumpifnot (exp, label)
9768     tree exp;
9769     rtx label;
9770{
9771  do_jump (exp, label, NULL_RTX);
9772}
9773
9774/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9775
9776void
9777jumpif (exp, label)
9778     tree exp;
9779     rtx label;
9780{
9781  do_jump (exp, NULL_RTX, label);
9782}
9783
9784/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9785   the result is zero, or IF_TRUE_LABEL if the result is one.
9786   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9787   meaning fall through in that case.
9788
9789   do_jump always does any pending stack adjust except when it does not
9790   actually perform a jump.  An example where there is no jump
9791   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9792
9793   This function is responsible for optimizing cases such as
9794   &&, || and comparison operators in EXP.  */
9795
9796void
9797do_jump (exp, if_false_label, if_true_label)
9798     tree exp;
9799     rtx if_false_label, if_true_label;
9800{
9801  enum tree_code code = TREE_CODE (exp);
9802  /* Some cases need to create a label to jump to
9803     in order to properly fall through.
9804     These cases set DROP_THROUGH_LABEL nonzero.  */
9805  rtx drop_through_label = 0;
9806  rtx temp;
9807  int i;
9808  tree type;
9809  enum machine_mode mode;
9810
9811#ifdef MAX_INTEGER_COMPUTATION_MODE
9812  check_max_integer_computation_mode (exp);
9813#endif
9814
9815  emit_queue ();
9816
9817  switch (code)
9818    {
9819    case ERROR_MARK:
9820      break;
9821
9822    case INTEGER_CST:
9823      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9824      if (temp)
9825	emit_jump (temp);
9826      break;
9827
9828#if 0
9829      /* This is not true with #pragma weak  */
9830    case ADDR_EXPR:
9831      /* The address of something can never be zero.  */
9832      if (if_true_label)
9833	emit_jump (if_true_label);
9834      break;
9835#endif
9836
9837    case UNSAVE_EXPR:
9838      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9839      TREE_OPERAND (exp, 0)
9840	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
9841      break;
9842
9843    case NOP_EXPR:
9844      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9845	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9846	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9847	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9848	goto normal;
9849    case CONVERT_EXPR:
9850      /* If we are narrowing the operand, we have to do the compare in the
9851	 narrower mode.  */
9852      if ((TYPE_PRECISION (TREE_TYPE (exp))
9853	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9854	goto normal;
9855    case NON_LVALUE_EXPR:
9856    case REFERENCE_EXPR:
9857    case ABS_EXPR:
9858    case NEGATE_EXPR:
9859    case LROTATE_EXPR:
9860    case RROTATE_EXPR:
9861      /* These cannot change zero->nonzero or vice versa.  */
9862      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9863      break;
9864
9865    case WITH_RECORD_EXPR:
9866      /* Put the object on the placeholder list, recurse through our first
9867	 operand, and pop the list.  */
9868      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9869				    placeholder_list);
9870      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9871      placeholder_list = TREE_CHAIN (placeholder_list);
9872      break;
9873
9874#if 0
9875      /* This is never less insns than evaluating the PLUS_EXPR followed by
9876	 a test and can be longer if the test is eliminated.  */
9877    case PLUS_EXPR:
9878      /* Reduce to minus.  */
9879      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9880		   TREE_OPERAND (exp, 0),
9881		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9882				 TREE_OPERAND (exp, 1))));
9883      /* Process as MINUS.  */
9884#endif
9885
9886    case MINUS_EXPR:
9887      /* Nonzero iff operands of minus differ.  */
9888      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9889				  TREE_OPERAND (exp, 0),
9890				  TREE_OPERAND (exp, 1)),
9891			   NE, NE, if_false_label, if_true_label);
9892      break;
9893
9894    case BIT_AND_EXPR:
9895      /* If we are AND'ing with a small constant, do this comparison in the
9896	 smallest type that fits.  If the machine doesn't have comparisons
9897	 that small, it will be converted back to the wider comparison.
9898	 This helps if we are testing the sign bit of a narrower object.
9899	 combine can't do this for us because it can't know whether a
9900	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
9901
9902      if (! SLOW_BYTE_ACCESS
9903	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9904	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9905	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9906	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9907	  && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9908	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9909	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9910	      != CODE_FOR_nothing))
9911	{
9912	  do_jump (convert (type, exp), if_false_label, if_true_label);
9913	  break;
9914	}
9915      goto normal;
9916
9917    case TRUTH_NOT_EXPR:
9918      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9919      break;
9920
9921    case TRUTH_ANDIF_EXPR:
9922      if (if_false_label == 0)
9923	if_false_label = drop_through_label = gen_label_rtx ();
9924      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9925      start_cleanup_deferral ();
9926      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9927      end_cleanup_deferral ();
9928      break;
9929
9930    case TRUTH_ORIF_EXPR:
9931      if (if_true_label == 0)
9932	if_true_label = drop_through_label = gen_label_rtx ();
9933      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9934      start_cleanup_deferral ();
9935      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9936      end_cleanup_deferral ();
9937      break;
9938
9939    case COMPOUND_EXPR:
9940      push_temp_slots ();
9941      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9942      preserve_temp_slots (NULL_RTX);
9943      free_temp_slots ();
9944      pop_temp_slots ();
9945      emit_queue ();
9946      do_pending_stack_adjust ();
9947      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9948      break;
9949
9950    case COMPONENT_REF:
9951    case BIT_FIELD_REF:
9952    case ARRAY_REF:
9953    case ARRAY_RANGE_REF:
9954      {
9955	HOST_WIDE_INT bitsize, bitpos;
9956	int unsignedp;
9957	enum machine_mode mode;
9958	tree type;
9959	tree offset;
9960	int volatilep = 0;
9961
9962	/* Get description of this reference.  We don't actually care
9963	   about the underlying object here.  */
9964	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9965			     &unsignedp, &volatilep);
9966
9967	type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9968	if (! SLOW_BYTE_ACCESS
9969	    && type != 0 && bitsize >= 0
9970	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9971	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9972		!= CODE_FOR_nothing))
9973	  {
9974	    do_jump (convert (type, exp), if_false_label, if_true_label);
9975	    break;
9976	  }
9977	goto normal;
9978      }
9979
9980    case COND_EXPR:
9981      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9982      if (integer_onep (TREE_OPERAND (exp, 1))
9983	  && integer_zerop (TREE_OPERAND (exp, 2)))
9984	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9985
9986      else if (integer_zerop (TREE_OPERAND (exp, 1))
9987	       && integer_onep (TREE_OPERAND (exp, 2)))
9988	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9989
9990      else
9991	{
9992	  rtx label1 = gen_label_rtx ();
9993	  drop_through_label = gen_label_rtx ();
9994
9995	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9996
9997	  start_cleanup_deferral ();
9998	  /* Now the THEN-expression.  */
9999	  do_jump (TREE_OPERAND (exp, 1),
10000		   if_false_label ? if_false_label : drop_through_label,
10001		   if_true_label ? if_true_label : drop_through_label);
10002	  /* In case the do_jump just above never jumps.  */
10003	  do_pending_stack_adjust ();
10004	  emit_label (label1);
10005
10006	  /* Now the ELSE-expression.  */
10007	  do_jump (TREE_OPERAND (exp, 2),
10008		   if_false_label ? if_false_label : drop_through_label,
10009		   if_true_label ? if_true_label : drop_through_label);
10010	  end_cleanup_deferral ();
10011	}
10012      break;
10013
10014    case EQ_EXPR:
10015      {
10016	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10017
10018	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10019	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10020	  {
10021	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10022	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10023	    do_jump
10024	      (fold
10025	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10026		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10027				    fold (build1 (REALPART_EXPR,
10028						  TREE_TYPE (inner_type),
10029						  exp0)),
10030				    fold (build1 (REALPART_EXPR,
10031						  TREE_TYPE (inner_type),
10032						  exp1)))),
10033		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10034				    fold (build1 (IMAGPART_EXPR,
10035						  TREE_TYPE (inner_type),
10036						  exp0)),
10037				    fold (build1 (IMAGPART_EXPR,
10038						  TREE_TYPE (inner_type),
10039						  exp1)))))),
10040	       if_false_label, if_true_label);
10041	  }
10042
10043	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10044	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10045
10046	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10047		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
10048	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10049	else
10050	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
10051	break;
10052      }
10053
10054    case NE_EXPR:
10055      {
10056	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10057
10058	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10059	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10060	  {
10061	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10062	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10063	    do_jump
10064	      (fold
10065	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10066		       fold (build (NE_EXPR, TREE_TYPE (exp),
10067				    fold (build1 (REALPART_EXPR,
10068						  TREE_TYPE (inner_type),
10069						  exp0)),
10070				    fold (build1 (REALPART_EXPR,
10071						  TREE_TYPE (inner_type),
10072						  exp1)))),
10073		       fold (build (NE_EXPR, TREE_TYPE (exp),
10074				    fold (build1 (IMAGPART_EXPR,
10075						  TREE_TYPE (inner_type),
10076						  exp0)),
10077				    fold (build1 (IMAGPART_EXPR,
10078						  TREE_TYPE (inner_type),
10079						  exp1)))))),
10080	       if_false_label, if_true_label);
10081	  }
10082
10083	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10084	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10085
10086	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10087		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
10088	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10089	else
10090	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
10091	break;
10092      }
10093
10094    case LT_EXPR:
10095      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10096      if (GET_MODE_CLASS (mode) == MODE_INT
10097	  && ! can_compare_p (LT, mode, ccp_jump))
10098	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10099      else
10100	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
10101      break;
10102
10103    case LE_EXPR:
10104      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10105      if (GET_MODE_CLASS (mode) == MODE_INT
10106	  && ! can_compare_p (LE, mode, ccp_jump))
10107	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10108      else
10109	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
10110      break;
10111
10112    case GT_EXPR:
10113      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10114      if (GET_MODE_CLASS (mode) == MODE_INT
10115	  && ! can_compare_p (GT, mode, ccp_jump))
10116	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10117      else
10118	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
10119      break;
10120
10121    case GE_EXPR:
10122      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10123      if (GET_MODE_CLASS (mode) == MODE_INT
10124	  && ! can_compare_p (GE, mode, ccp_jump))
10125	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10126      else
10127	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10128      break;
10129
10130    case UNORDERED_EXPR:
10131    case ORDERED_EXPR:
10132      {
10133	enum rtx_code cmp, rcmp;
10134	int do_rev;
10135
10136	if (code == UNORDERED_EXPR)
10137	  cmp = UNORDERED, rcmp = ORDERED;
10138	else
10139	  cmp = ORDERED, rcmp = UNORDERED;
10140	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10141
10142	do_rev = 0;
10143	if (! can_compare_p (cmp, mode, ccp_jump)
10144	    && (can_compare_p (rcmp, mode, ccp_jump)
10145		/* If the target doesn't provide either UNORDERED or ORDERED
10146		   comparisons, canonicalize on UNORDERED for the library.  */
10147		|| rcmp == UNORDERED))
10148	  do_rev = 1;
10149
10150	if (! do_rev)
10151	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10152	else
10153	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10154      }
10155      break;
10156
10157    {
10158      enum rtx_code rcode1;
10159      enum tree_code tcode2;
10160
10161      case UNLT_EXPR:
10162	rcode1 = UNLT;
10163	tcode2 = LT_EXPR;
10164	goto unordered_bcc;
10165      case UNLE_EXPR:
10166	rcode1 = UNLE;
10167	tcode2 = LE_EXPR;
10168	goto unordered_bcc;
10169      case UNGT_EXPR:
10170	rcode1 = UNGT;
10171	tcode2 = GT_EXPR;
10172	goto unordered_bcc;
10173      case UNGE_EXPR:
10174	rcode1 = UNGE;
10175	tcode2 = GE_EXPR;
10176	goto unordered_bcc;
10177      case UNEQ_EXPR:
10178	rcode1 = UNEQ;
10179	tcode2 = EQ_EXPR;
10180	goto unordered_bcc;
10181
10182      unordered_bcc:
10183	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10184	if (can_compare_p (rcode1, mode, ccp_jump))
10185	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10186			       if_true_label);
10187	else
10188	  {
10189	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
10190	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
10191	    tree cmp0, cmp1;
10192
10193	    /* If the target doesn't support combined unordered
10194	       compares, decompose into UNORDERED + comparison.  */
10195	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10196	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10197	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10198	    do_jump (exp, if_false_label, if_true_label);
10199	  }
10200      }
10201      break;
10202
10203      /* Special case:
10204		__builtin_expect (<test>, 0)	and
10205		__builtin_expect (<test>, 1)
10206
10207	 We need to do this here, so that <test> is not converted to a SCC
10208	 operation on machines that use condition code registers and COMPARE
10209	 like the PowerPC, and then the jump is done based on whether the SCC
10210	 operation produced a 1 or 0.  */
10211    case CALL_EXPR:
10212      /* Check for a built-in function.  */
10213      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10214	{
10215	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10216	  tree arglist = TREE_OPERAND (exp, 1);
10217
10218	  if (TREE_CODE (fndecl) == FUNCTION_DECL
10219	      && DECL_BUILT_IN (fndecl)
10220	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10221	      && arglist != NULL_TREE
10222	      && TREE_CHAIN (arglist) != NULL_TREE)
10223	    {
10224	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10225						    if_true_label);
10226
10227	      if (seq != NULL_RTX)
10228		{
10229		  emit_insn (seq);
10230		  return;
10231		}
10232	    }
10233	}
10234      /* fall through and generate the normal code.  */
10235
10236    default:
10237    normal:
10238      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10239#if 0
10240      /* This is not needed any more and causes poor code since it causes
10241	 comparisons and tests from non-SI objects to have different code
10242	 sequences.  */
10243      /* Copy to register to avoid generating bad insns by cse
10244	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
10245      if (!cse_not_expected && GET_CODE (temp) == MEM)
10246	temp = copy_to_reg (temp);
10247#endif
10248      do_pending_stack_adjust ();
10249      /* Do any postincrements in the expression that was tested.  */
10250      emit_queue ();
10251
10252      if (GET_CODE (temp) == CONST_INT
10253	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10254	  || GET_CODE (temp) == LABEL_REF)
10255	{
10256	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10257	  if (target)
10258	    emit_jump (target);
10259	}
10260      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10261	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10262	/* Note swapping the labels gives us not-equal.  */
10263	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10264      else if (GET_MODE (temp) != VOIDmode)
10265	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10266				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10267				 GET_MODE (temp), NULL_RTX,
10268				 if_false_label, if_true_label);
10269      else
10270	abort ();
10271    }
10272
10273  if (drop_through_label)
10274    {
10275      /* If do_jump produces code that might be jumped around,
10276	 do any stack adjusts from that code, before the place
10277	 where control merges in.  */
10278      do_pending_stack_adjust ();
10279      emit_label (drop_through_label);
10280    }
10281}
10282
10283/* Given a comparison expression EXP for values too wide to be compared
10284   with one insn, test the comparison and jump to the appropriate label.
10285   The code of EXP is ignored; we always test GT if SWAP is 0,
10286   and LT if SWAP is 1.  */
10287
10288static void
10289do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10290     tree exp;
10291     int swap;
10292     rtx if_false_label, if_true_label;
10293{
10294  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10295  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10296  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10297  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10298
10299  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10300}
10301
10302/* Compare OP0 with OP1, word at a time, in mode MODE.
10303   UNSIGNEDP says to do unsigned comparison.
10304   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
10305
10306void
10307do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10308     enum machine_mode mode;
10309     int unsignedp;
10310     rtx op0, op1;
10311     rtx if_false_label, if_true_label;
10312{
10313  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10314  rtx drop_through_label = 0;
10315  int i;
10316
10317  if (! if_true_label || ! if_false_label)
10318    drop_through_label = gen_label_rtx ();
10319  if (! if_true_label)
10320    if_true_label = drop_through_label;
10321  if (! if_false_label)
10322    if_false_label = drop_through_label;
10323
10324  /* Compare a word at a time, high order first.  */
10325  for (i = 0; i < nwords; i++)
10326    {
10327      rtx op0_word, op1_word;
10328
10329      if (WORDS_BIG_ENDIAN)
10330	{
10331	  op0_word = operand_subword_force (op0, i, mode);
10332	  op1_word = operand_subword_force (op1, i, mode);
10333	}
10334      else
10335	{
10336	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10337	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10338	}
10339
10340      /* All but high-order word must be compared as unsigned.  */
10341      do_compare_rtx_and_jump (op0_word, op1_word, GT,
10342			       (unsignedp || i > 0), word_mode, NULL_RTX,
10343			       NULL_RTX, if_true_label);
10344
10345      /* Consider lower words only if these are equal.  */
10346      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10347			       NULL_RTX, NULL_RTX, if_false_label);
10348    }
10349
10350  if (if_false_label)
10351    emit_jump (if_false_label);
10352  if (drop_through_label)
10353    emit_label (drop_through_label);
10354}
10355
10356/* Given an EQ_EXPR expression EXP for values too wide to be compared
10357   with one insn, test the comparison and jump to the appropriate label.  */
10358
10359static void
10360do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10361     tree exp;
10362     rtx if_false_label, if_true_label;
10363{
10364  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10365  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10366  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10367  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10368  int i;
10369  rtx drop_through_label = 0;
10370
10371  if (! if_false_label)
10372    drop_through_label = if_false_label = gen_label_rtx ();
10373
10374  for (i = 0; i < nwords; i++)
10375    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10376			     operand_subword_force (op1, i, mode),
10377			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10378			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
10379
10380  if (if_true_label)
10381    emit_jump (if_true_label);
10382  if (drop_through_label)
10383    emit_label (drop_through_label);
10384}
10385
10386/* Jump according to whether OP0 is 0.
10387   We assume that OP0 has an integer mode that is too wide
10388   for the available compare insns.  */
10389
10390void
10391do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10392     rtx op0;
10393     rtx if_false_label, if_true_label;
10394{
10395  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10396  rtx part;
10397  int i;
10398  rtx drop_through_label = 0;
10399
10400  /* The fastest way of doing this comparison on almost any machine is to
10401     "or" all the words and compare the result.  If all have to be loaded
10402     from memory and this is a very wide item, it's possible this may
10403     be slower, but that's highly unlikely.  */
10404
10405  part = gen_reg_rtx (word_mode);
10406  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10407  for (i = 1; i < nwords && part != 0; i++)
10408    part = expand_binop (word_mode, ior_optab, part,
10409			 operand_subword_force (op0, i, GET_MODE (op0)),
10410			 part, 1, OPTAB_WIDEN);
10411
10412  if (part != 0)
10413    {
10414      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10415			       NULL_RTX, if_false_label, if_true_label);
10416
10417      return;
10418    }
10419
10420  /* If we couldn't do the "or" simply, do this with a series of compares.  */
10421  if (! if_false_label)
10422    drop_through_label = if_false_label = gen_label_rtx ();
10423
10424  for (i = 0; i < nwords; i++)
10425    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10426			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
10427			     if_false_label, NULL_RTX);
10428
10429  if (if_true_label)
10430    emit_jump (if_true_label);
10431
10432  if (drop_through_label)
10433    emit_label (drop_through_label);
10434}
10435
10436/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10437   (including code to compute the values to be compared)
10438   and set (CC0) according to the result.
10439   The decision as to signed or unsigned comparison must be made by the caller.
10440
10441   We force a stack adjustment unless there are currently
10442   things pushed on the stack that aren't yet used.
10443
10444   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10445   compared.  */
10446
10447rtx
10448compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10449     rtx op0, op1;
10450     enum rtx_code code;
10451     int unsignedp;
10452     enum machine_mode mode;
10453     rtx size;
10454{
10455  enum rtx_code ucode;
10456  rtx tem;
10457
10458  /* If one operand is constant, make it the second one.  Only do this
10459     if the other operand is not constant as well.  */
10460
10461  if (swap_commutative_operands_p (op0, op1))
10462    {
10463      tem = op0;
10464      op0 = op1;
10465      op1 = tem;
10466      code = swap_condition (code);
10467    }
10468
10469  if (flag_force_mem)
10470    {
10471      op0 = force_not_mem (op0);
10472      op1 = force_not_mem (op1);
10473    }
10474
10475  do_pending_stack_adjust ();
10476
10477  ucode = unsignedp ? unsigned_condition (code) : code;
10478  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10479    return tem;
10480
10481#if 0
10482  /* There's no need to do this now that combine.c can eliminate lots of
10483     sign extensions.  This can be less efficient in certain cases on other
10484     machines.  */
10485
10486  /* If this is a signed equality comparison, we can do it as an
10487     unsigned comparison since zero-extension is cheaper than sign
10488     extension and comparisons with zero are done as unsigned.  This is
10489     the case even on machines that can do fast sign extension, since
10490     zero-extension is easier to combine with other operations than
10491     sign-extension is.  If we are comparing against a constant, we must
10492     convert it to what it would look like unsigned.  */
10493  if ((code == EQ || code == NE) && ! unsignedp
10494      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10495    {
10496      if (GET_CODE (op1) == CONST_INT
10497	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10498	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10499      unsignedp = 1;
10500    }
10501#endif
10502
10503  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10504
10505#if HAVE_cc0
10506  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10507#else
10508  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10509#endif
10510}
10511
10512/* Like do_compare_and_jump but expects the values to compare as two rtx's.
10513   The decision as to signed or unsigned comparison must be made by the caller.
10514
10515   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10516   compared.  */
10517
10518void
10519do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10520			 if_false_label, if_true_label)
10521     rtx op0, op1;
10522     enum rtx_code code;
10523     int unsignedp;
10524     enum machine_mode mode;
10525     rtx size;
10526     rtx if_false_label, if_true_label;
10527{
10528  enum rtx_code ucode;
10529  rtx tem;
10530  int dummy_true_label = 0;
10531
10532  /* Reverse the comparison if that is safe and we want to jump if it is
10533     false.  */
10534  if (! if_true_label && ! FLOAT_MODE_P (mode))
10535    {
10536      if_true_label = if_false_label;
10537      if_false_label = 0;
10538      code = reverse_condition (code);
10539    }
10540
10541  /* If one operand is constant, make it the second one.  Only do this
10542     if the other operand is not constant as well.  */
10543
10544  if (swap_commutative_operands_p (op0, op1))
10545    {
10546      tem = op0;
10547      op0 = op1;
10548      op1 = tem;
10549      code = swap_condition (code);
10550    }
10551
10552  if (flag_force_mem)
10553    {
10554      op0 = force_not_mem (op0);
10555      op1 = force_not_mem (op1);
10556    }
10557
10558  do_pending_stack_adjust ();
10559
10560  ucode = unsignedp ? unsigned_condition (code) : code;
10561  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10562    {
10563      if (tem == const_true_rtx)
10564	{
10565	  if (if_true_label)
10566	    emit_jump (if_true_label);
10567	}
10568      else
10569	{
10570	  if (if_false_label)
10571	    emit_jump (if_false_label);
10572	}
10573      return;
10574    }
10575
10576#if 0
10577  /* There's no need to do this now that combine.c can eliminate lots of
10578     sign extensions.  This can be less efficient in certain cases on other
10579     machines.  */
10580
10581  /* If this is a signed equality comparison, we can do it as an
10582     unsigned comparison since zero-extension is cheaper than sign
10583     extension and comparisons with zero are done as unsigned.  This is
10584     the case even on machines that can do fast sign extension, since
10585     zero-extension is easier to combine with other operations than
10586     sign-extension is.  If we are comparing against a constant, we must
10587     convert it to what it would look like unsigned.  */
10588  if ((code == EQ || code == NE) && ! unsignedp
10589      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10590    {
10591      if (GET_CODE (op1) == CONST_INT
10592	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10593	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10594      unsignedp = 1;
10595    }
10596#endif
10597
10598  if (! if_true_label)
10599    {
10600      dummy_true_label = 1;
10601      if_true_label = gen_label_rtx ();
10602    }
10603
10604  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10605			   if_true_label);
10606
10607  if (if_false_label)
10608    emit_jump (if_false_label);
10609  if (dummy_true_label)
10610    emit_label (if_true_label);
10611}
10612
10613/* Generate code for a comparison expression EXP (including code to compute
10614   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10615   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
10616   generated code will drop through.
10617   SIGNED_CODE should be the rtx operation for this comparison for
10618   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10619
10620   We force a stack adjustment unless there are currently
10621   things pushed on the stack that aren't yet used.  */
10622
10623static void
10624do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10625		     if_true_label)
10626     tree exp;
10627     enum rtx_code signed_code, unsigned_code;
10628     rtx if_false_label, if_true_label;
10629{
10630  rtx op0, op1;
10631  tree type;
10632  enum machine_mode mode;
10633  int unsignedp;
10634  enum rtx_code code;
10635
10636  /* Don't crash if the comparison was erroneous.  */
10637  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10638  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10639    return;
10640
10641  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10642  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10643    return;
10644
10645  type = TREE_TYPE (TREE_OPERAND (exp, 0));
10646  mode = TYPE_MODE (type);
10647  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10648      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10649	  || (GET_MODE_BITSIZE (mode)
10650	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10651								      1)))))))
10652    {
10653      /* op0 might have been replaced by promoted constant, in which
10654	 case the type of second argument should be used.  */
10655      type = TREE_TYPE (TREE_OPERAND (exp, 1));
10656      mode = TYPE_MODE (type);
10657    }
10658  unsignedp = TREE_UNSIGNED (type);
10659  code = unsignedp ? unsigned_code : signed_code;
10660
10661#ifdef HAVE_canonicalize_funcptr_for_compare
10662  /* If function pointers need to be "canonicalized" before they can
10663     be reliably compared, then canonicalize them.  */
10664  if (HAVE_canonicalize_funcptr_for_compare
10665      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10666      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10667	  == FUNCTION_TYPE))
10668    {
10669      rtx new_op0 = gen_reg_rtx (mode);
10670
10671      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10672      op0 = new_op0;
10673    }
10674
10675  if (HAVE_canonicalize_funcptr_for_compare
10676      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10677      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10678	  == FUNCTION_TYPE))
10679    {
10680      rtx new_op1 = gen_reg_rtx (mode);
10681
10682      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10683      op1 = new_op1;
10684    }
10685#endif
10686
10687  /* Do any postincrements in the expression that was tested.  */
10688  emit_queue ();
10689
10690  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10691			   ((mode == BLKmode)
10692			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10693			   if_false_label, if_true_label);
10694}
10695
10696/* Generate code to calculate EXP using a store-flag instruction
10697   and return an rtx for the result.  EXP is either a comparison
10698   or a TRUTH_NOT_EXPR whose operand is a comparison.
10699
10700   If TARGET is nonzero, store the result there if convenient.
10701
10702   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10703   cheap.
10704
10705   Return zero if there is no suitable set-flag instruction
10706   available on this machine.
10707
10708   Once expand_expr has been called on the arguments of the comparison,
10709   we are committed to doing the store flag, since it is not safe to
10710   re-evaluate the expression.  We emit the store-flag insn by calling
10711   emit_store_flag, but only expand the arguments if we have a reason
10712   to believe that emit_store_flag will be successful.  If we think that
10713   it will, but it isn't, we have to simulate the store-flag with a
10714   set/jump/set sequence.  */
10715
10716static rtx
10717do_store_flag (exp, target, mode, only_cheap)
10718     tree exp;
10719     rtx target;
10720     enum machine_mode mode;
10721     int only_cheap;
10722{
10723  enum rtx_code code;
10724  tree arg0, arg1, type;
10725  tree tem;
10726  enum machine_mode operand_mode;
10727  int invert = 0;
10728  int unsignedp;
10729  rtx op0, op1;
10730  enum insn_code icode;
10731  rtx subtarget = target;
10732  rtx result, label;
10733
10734  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10735     result at the end.  We can't simply invert the test since it would
10736     have already been inverted if it were valid.  This case occurs for
10737     some floating-point comparisons.  */
10738
10739  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10740    invert = 1, exp = TREE_OPERAND (exp, 0);
10741
10742  arg0 = TREE_OPERAND (exp, 0);
10743  arg1 = TREE_OPERAND (exp, 1);
10744
10745  /* Don't crash if the comparison was erroneous.  */
10746  if (arg0 == error_mark_node || arg1 == error_mark_node)
10747    return const0_rtx;
10748
10749  type = TREE_TYPE (arg0);
10750  operand_mode = TYPE_MODE (type);
10751  unsignedp = TREE_UNSIGNED (type);
10752
10753  /* We won't bother with BLKmode store-flag operations because it would mean
10754     passing a lot of information to emit_store_flag.  */
10755  if (operand_mode == BLKmode)
10756    return 0;
10757
10758  /* We won't bother with store-flag operations involving function pointers
10759     when function pointers must be canonicalized before comparisons.  */
10760#ifdef HAVE_canonicalize_funcptr_for_compare
10761  if (HAVE_canonicalize_funcptr_for_compare
10762      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10763	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10764	       == FUNCTION_TYPE))
10765	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10766	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10767		  == FUNCTION_TYPE))))
10768    return 0;
10769#endif
10770
10771  STRIP_NOPS (arg0);
10772  STRIP_NOPS (arg1);
10773
10774  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10775     operation of some type.  Some comparisons against 1 and -1 can be
10776     converted to comparisons with zero.  Do so here so that the tests
10777     below will be aware that we have a comparison with zero.   These
10778     tests will not catch constants in the first operand, but constants
10779     are rarely passed as the first operand.  */
10780
10781  switch (TREE_CODE (exp))
10782    {
10783    case EQ_EXPR:
10784      code = EQ;
10785      break;
10786    case NE_EXPR:
10787      code = NE;
10788      break;
10789    case LT_EXPR:
10790      if (integer_onep (arg1))
10791	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10792      else
10793	code = unsignedp ? LTU : LT;
10794      break;
10795    case LE_EXPR:
10796      if (! unsignedp && integer_all_onesp (arg1))
10797	arg1 = integer_zero_node, code = LT;
10798      else
10799	code = unsignedp ? LEU : LE;
10800      break;
10801    case GT_EXPR:
10802      if (! unsignedp && integer_all_onesp (arg1))
10803	arg1 = integer_zero_node, code = GE;
10804      else
10805	code = unsignedp ? GTU : GT;
10806      break;
10807    case GE_EXPR:
10808      if (integer_onep (arg1))
10809	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10810      else
10811	code = unsignedp ? GEU : GE;
10812      break;
10813
10814    case UNORDERED_EXPR:
10815      code = UNORDERED;
10816      break;
10817    case ORDERED_EXPR:
10818      code = ORDERED;
10819      break;
10820    case UNLT_EXPR:
10821      code = UNLT;
10822      break;
10823    case UNLE_EXPR:
10824      code = UNLE;
10825      break;
10826    case UNGT_EXPR:
10827      code = UNGT;
10828      break;
10829    case UNGE_EXPR:
10830      code = UNGE;
10831      break;
10832    case UNEQ_EXPR:
10833      code = UNEQ;
10834      break;
10835
10836    default:
10837      abort ();
10838    }
10839
10840  /* Put a constant second.  */
10841  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10842    {
10843      tem = arg0; arg0 = arg1; arg1 = tem;
10844      code = swap_condition (code);
10845    }
10846
10847  /* If this is an equality or inequality test of a single bit, we can
10848     do this by shifting the bit being tested to the low-order bit and
10849     masking the result with the constant 1.  If the condition was EQ,
10850     we xor it with 1.  This does not require an scc insn and is faster
10851     than an scc insn even if we have it.  */
10852
10853  if ((code == NE || code == EQ)
10854      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10855      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10856    {
10857      tree inner = TREE_OPERAND (arg0, 0);
10858      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10859      int ops_unsignedp;
10860
10861      /* If INNER is a right shift of a constant and it plus BITNUM does
10862	 not overflow, adjust BITNUM and INNER.  */
10863
10864      if (TREE_CODE (inner) == RSHIFT_EXPR
10865	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10866	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10867	  && bitnum < TYPE_PRECISION (type)
10868	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10869				   bitnum - TYPE_PRECISION (type)))
10870	{
10871	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10872	  inner = TREE_OPERAND (inner, 0);
10873	}
10874
10875      /* If we are going to be able to omit the AND below, we must do our
10876	 operations as unsigned.  If we must use the AND, we have a choice.
10877	 Normally unsigned is faster, but for some machines signed is.  */
10878      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10879#ifdef LOAD_EXTEND_OP
10880		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10881#else
10882		       : 1
10883#endif
10884		       );
10885
10886      if (! get_subtarget (subtarget)
10887	  || GET_MODE (subtarget) != operand_mode
10888	  || ! safe_from_p (subtarget, inner, 1))
10889	subtarget = 0;
10890
10891      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10892
10893      if (bitnum != 0)
10894	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10895			    size_int (bitnum), subtarget, ops_unsignedp);
10896
10897      if (GET_MODE (op0) != mode)
10898	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10899
10900      if ((code == EQ && ! invert) || (code == NE && invert))
10901	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10902			    ops_unsignedp, OPTAB_LIB_WIDEN);
10903
10904      /* Put the AND last so it can combine with more things.  */
10905      if (bitnum != TYPE_PRECISION (type) - 1)
10906	op0 = expand_and (mode, op0, const1_rtx, subtarget);
10907
10908      return op0;
10909    }
10910
10911  /* Now see if we are likely to be able to do this.  Return if not.  */
10912  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10913    return 0;
10914
10915  icode = setcc_gen_code[(int) code];
10916  if (icode == CODE_FOR_nothing
10917      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10918    {
10919      /* We can only do this if it is one of the special cases that
10920	 can be handled without an scc insn.  */
10921      if ((code == LT && integer_zerop (arg1))
10922	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10923	;
10924      else if (BRANCH_COST >= 0
10925	       && ! only_cheap && (code == NE || code == EQ)
10926	       && TREE_CODE (type) != REAL_TYPE
10927	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10928		    != CODE_FOR_nothing)
10929		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10930		       != CODE_FOR_nothing)))
10931	;
10932      else
10933	return 0;
10934    }
10935
10936  if (! get_subtarget (target)
10937      || GET_MODE (subtarget) != operand_mode
10938      || ! safe_from_p (subtarget, arg1, 1))
10939    subtarget = 0;
10940
10941  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10942  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10943
10944  if (target == 0)
10945    target = gen_reg_rtx (mode);
10946
10947  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10948     because, if the emit_store_flag does anything it will succeed and
10949     OP0 and OP1 will not be used subsequently.  */
10950
10951  result = emit_store_flag (target, code,
10952			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10953			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10954			    operand_mode, unsignedp, 1);
10955
10956  if (result)
10957    {
10958      if (invert)
10959	result = expand_binop (mode, xor_optab, result, const1_rtx,
10960			       result, 0, OPTAB_LIB_WIDEN);
10961      return result;
10962    }
10963
10964  /* If this failed, we have to do this with set/compare/jump/set code.  */
10965  if (GET_CODE (target) != REG
10966      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10967    target = gen_reg_rtx (GET_MODE (target));
10968
10969  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10970  result = compare_from_rtx (op0, op1, code, unsignedp,
10971			     operand_mode, NULL_RTX);
10972  if (GET_CODE (result) == CONST_INT)
10973    return (((result == const0_rtx && ! invert)
10974	     || (result != const0_rtx && invert))
10975	    ? const0_rtx : const1_rtx);
10976
10977  /* The code of RESULT may not match CODE if compare_from_rtx
10978     decided to swap its operands and reverse the original code.
10979
10980     We know that compare_from_rtx returns either a CONST_INT or
10981     a new comparison code, so it is safe to just extract the
10982     code from RESULT.  */
10983  code = GET_CODE (result);
10984
10985  label = gen_label_rtx ();
10986  if (bcc_gen_fctn[(int) code] == 0)
10987    abort ();
10988
10989  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10990  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10991  emit_label (label);
10992
10993  return target;
10994}
10995
10996
10997/* Stubs in case we haven't got a casesi insn.  */
10998#ifndef HAVE_casesi
10999# define HAVE_casesi 0
11000# define gen_casesi(a, b, c, d, e) (0)
11001# define CODE_FOR_casesi CODE_FOR_nothing
11002#endif
11003
11004/* If the machine does not have a case insn that compares the bounds,
11005   this means extra overhead for dispatch tables, which raises the
11006   threshold for using them.  */
11007#ifndef CASE_VALUES_THRESHOLD
11008#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
11009#endif /* CASE_VALUES_THRESHOLD */
11010
11011unsigned int
11012case_values_threshold ()
11013{
11014  return CASE_VALUES_THRESHOLD;
11015}
11016
11017/* Attempt to generate a casesi instruction.  Returns 1 if successful,
11018   0 otherwise (i.e. if there is no casesi instruction).  */
11019int
11020try_casesi (index_type, index_expr, minval, range,
11021	    table_label, default_label)
11022     tree index_type, index_expr, minval, range;
11023     rtx table_label ATTRIBUTE_UNUSED;
11024     rtx default_label;
11025{
11026  enum machine_mode index_mode = SImode;
11027  int index_bits = GET_MODE_BITSIZE (index_mode);
11028  rtx op1, op2, index;
11029  enum machine_mode op_mode;
11030
11031  if (! HAVE_casesi)
11032    return 0;
11033
11034  /* Convert the index to SImode.  */
11035  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11036    {
11037      enum machine_mode omode = TYPE_MODE (index_type);
11038      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
11039
11040      /* We must handle the endpoints in the original mode.  */
11041      index_expr = build (MINUS_EXPR, index_type,
11042			  index_expr, minval);
11043      minval = integer_zero_node;
11044      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11045      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11046			       omode, 1, default_label);
11047      /* Now we can safely truncate.  */
11048      index = convert_to_mode (index_mode, index, 0);
11049    }
11050  else
11051    {
11052      if (TYPE_MODE (index_type) != index_mode)
11053	{
11054	  index_expr = convert ((*lang_hooks.types.type_for_size)
11055				(index_bits, 0), index_expr);
11056	  index_type = TREE_TYPE (index_expr);
11057	}
11058
11059      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11060    }
11061  emit_queue ();
11062  index = protect_from_queue (index, 0);
11063  do_pending_stack_adjust ();
11064
11065  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
11066  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
11067      (index, op_mode))
11068    index = copy_to_mode_reg (op_mode, index);
11069
11070  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
11071
11072  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
11073  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
11074		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
11075  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
11076      (op1, op_mode))
11077    op1 = copy_to_mode_reg (op_mode, op1);
11078
11079  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
11080
11081  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
11082  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
11083		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
11084  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
11085      (op2, op_mode))
11086    op2 = copy_to_mode_reg (op_mode, op2);
11087
11088  emit_jump_insn (gen_casesi (index, op1, op2,
11089			      table_label, default_label));
11090  return 1;
11091}
11092
11093/* Attempt to generate a tablejump instruction; same concept.  */
11094#ifndef HAVE_tablejump
11095#define HAVE_tablejump 0
11096#define gen_tablejump(x, y) (0)
11097#endif
11098
11099/* Subroutine of the next function.
11100
11101   INDEX is the value being switched on, with the lowest value
11102   in the table already subtracted.
11103   MODE is its expected mode (needed if INDEX is constant).
11104   RANGE is the length of the jump table.
11105   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11106
11107   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11108   index value is out of range.  */
11109
11110static void
11111do_tablejump (index, mode, range, table_label, default_label)
11112     rtx index, range, table_label, default_label;
11113     enum machine_mode mode;
11114{
11115  rtx temp, vector;
11116
11117  if (INTVAL (range) > cfun->max_jumptable_ents)
11118    cfun->max_jumptable_ents = INTVAL (range);
11119
11120  /* Do an unsigned comparison (in the proper mode) between the index
11121     expression and the value which represents the length of the range.
11122     Since we just finished subtracting the lower bound of the range
11123     from the index expression, this comparison allows us to simultaneously
11124     check that the original index expression value is both greater than
11125     or equal to the minimum value of the range and less than or equal to
11126     the maximum value of the range.  */
11127
11128  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11129			   default_label);
11130
11131  /* If index is in range, it must fit in Pmode.
11132     Convert to Pmode so we can index with it.  */
11133  if (mode != Pmode)
11134    index = convert_to_mode (Pmode, index, 1);
11135
11136  /* Don't let a MEM slip thru, because then INDEX that comes
11137     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11138     and break_out_memory_refs will go to work on it and mess it up.  */
11139#ifdef PIC_CASE_VECTOR_ADDRESS
11140  if (flag_pic && GET_CODE (index) != REG)
11141    index = copy_to_mode_reg (Pmode, index);
11142#endif
11143
11144  /* If flag_force_addr were to affect this address
11145     it could interfere with the tricky assumptions made
11146     about addresses that contain label-refs,
11147     which may be valid only very near the tablejump itself.  */
11148  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11149     GET_MODE_SIZE, because this indicates how large insns are.  The other
11150     uses should all be Pmode, because they are addresses.  This code
11151     could fail if addresses and insns are not the same size.  */
11152  index = gen_rtx_PLUS (Pmode,
11153			gen_rtx_MULT (Pmode, index,
11154				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11155			gen_rtx_LABEL_REF (Pmode, table_label));
11156#ifdef PIC_CASE_VECTOR_ADDRESS
11157  if (flag_pic)
11158    index = PIC_CASE_VECTOR_ADDRESS (index);
11159  else
11160#endif
11161    index = memory_address_noforce (CASE_VECTOR_MODE, index);
11162  temp = gen_reg_rtx (CASE_VECTOR_MODE);
11163  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11164  RTX_UNCHANGING_P (vector) = 1;
11165  MEM_NOTRAP_P (vector) = 1;
11166  convert_move (temp, vector, 0);
11167
11168  emit_jump_insn (gen_tablejump (temp, table_label));
11169
11170  /* If we are generating PIC code or if the table is PC-relative, the
11171     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11172  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11173    emit_barrier ();
11174}
11175
11176int
11177try_tablejump (index_type, index_expr, minval, range,
11178	       table_label, default_label)
11179     tree index_type, index_expr, minval, range;
11180     rtx table_label, default_label;
11181{
11182  rtx index;
11183
11184  if (! HAVE_tablejump)
11185    return 0;
11186
11187  index_expr = fold (build (MINUS_EXPR, index_type,
11188			    convert (index_type, index_expr),
11189			    convert (index_type, minval)));
11190  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11191  emit_queue ();
11192  index = protect_from_queue (index, 0);
11193  do_pending_stack_adjust ();
11194
11195  do_tablejump (index, TYPE_MODE (index_type),
11196		convert_modes (TYPE_MODE (index_type),
11197			       TYPE_MODE (TREE_TYPE (range)),
11198			       expand_expr (range, NULL_RTX,
11199					    VOIDmode, 0),
11200			       TREE_UNSIGNED (TREE_TYPE (range))),
11201		table_label, default_label);
11202  return 1;
11203}
11204
11205/* Nonzero if the mode is a valid vector mode for this architecture.
11206   This returns nonzero even if there is no hardware support for the
11207   vector mode, but we can emulate with narrower modes.  */
11208
11209int
11210vector_mode_valid_p (mode)
11211     enum machine_mode mode;
11212{
11213  enum mode_class class = GET_MODE_CLASS (mode);
11214  enum machine_mode innermode;
11215
11216  /* Doh!  What's going on?  */
11217  if (class != MODE_VECTOR_INT
11218      && class != MODE_VECTOR_FLOAT)
11219    return 0;
11220
11221  /* Hardware support.  Woo hoo!  */
11222  if (VECTOR_MODE_SUPPORTED_P (mode))
11223    return 1;
11224
11225  innermode = GET_MODE_INNER (mode);
11226
11227  /* We should probably return 1 if requesting V4DI and we have no DI,
11228     but we have V2DI, but this is probably very unlikely.  */
11229
11230  /* If we have support for the inner mode, we can safely emulate it.
11231     We may not have V2DI, but me can emulate with a pair of DIs.  */
11232  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11233}
11234
11235#include "gt-expr.h"
11236