expr.c revision 122180
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "real.h"
26#include "rtl.h"
27#include "tree.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should be processed from last to first if the stack and args
53   grow in opposite directions, but only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#ifndef PUSH_ARGS_REVERSED
58#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
60#endif
61#endif
62
63#endif
64
65#ifndef STACK_PUSH_CODE
66#ifdef STACK_GROWS_DOWNWARD
67#define STACK_PUSH_CODE PRE_DEC
68#else
69#define STACK_PUSH_CODE PRE_INC
70#endif
71#endif
72
73/* Assume that case vectors are not pc-relative.  */
74#ifndef CASE_VECTOR_PC_RELATIVE
75#define CASE_VECTOR_PC_RELATIVE 0
76#endif
77
78/* Convert defined/undefined to boolean.  */
79#ifdef TARGET_MEM_FUNCTIONS
80#undef TARGET_MEM_FUNCTIONS
81#define TARGET_MEM_FUNCTIONS 1
82#else
83#define TARGET_MEM_FUNCTIONS 0
84#endif
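/* The block move and clear helpers later in this file use this flag to
   choose between the ANSI memcpy/memset library entry points and the
   BSD bcopy/bzero ones.  */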
85
86
87/* If this is nonzero, we do not bother generating VOLATILE
88   around volatile memory references, and we are willing to
89   output indirect addresses.  If cse is to follow, we reject
90   indirect addresses so a useful potential cse is generated;
91   if it is used only once, instruction combination will produce
92   the same indirect address eventually.  */
93int cse_not_expected;
94
95/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
96static tree placeholder_list = 0;
97
98/* This structure is used by move_by_pieces to describe the move to
99   be performed.  */
100struct move_by_pieces
101{
102  rtx to;			/* Destination of the move (null when pushing).  */
103  rtx to_addr;		/* Its address, possibly copied to a register.  */
104  int autinc_to;		/* Nonzero if TO_ADDR auto-increments.  */
105  int explicit_inc_to;	/* -1/+1 if explicit pre-dec/post-inc adds are emitted.  */
106  rtx from;			/* Source of the move.  */
107  rtx from_addr;		/* Its address, possibly copied to a register.  */
108  int autinc_from;		/* Nonzero if FROM_ADDR auto-increments.  */
109  int explicit_inc_from;	/* -1/+1 if explicit pre-dec/post-inc adds are emitted.  */
110  unsigned HOST_WIDE_INT len;	/* Bytes remaining to be moved.  */
111  HOST_WIDE_INT offset;	/* Current byte offset into the blocks.  */
112  int reverse;		/* Nonzero to move from the end backwards.  */
113};
114
115/* This structure is used by store_by_pieces to describe the store to
116   be performed.  */
117
118struct store_by_pieces
119{
120  rtx to;			/* Destination of the store.  */
121  rtx to_addr;		/* Its address, possibly copied to a register.  */
122  int autinc_to;		/* Nonzero if TO_ADDR auto-increments.  */
123  int explicit_inc_to;	/* -1/+1 if explicit pre-dec/post-inc adds are emitted.  */
124  unsigned HOST_WIDE_INT len;	/* Bytes remaining to be stored.  */
125  HOST_WIDE_INT offset;	/* Current byte offset into the block.  */
126  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
127  PTR constfundata;		/* Opaque data passed to CONSTFUN.  */
128  int reverse;		/* Nonzero to store from the end backwards.  */
129};
130
131static rtx enqueue_insn		PARAMS ((rtx, rtx));
132static unsigned HOST_WIDE_INT move_by_pieces_ninsns
133				PARAMS ((unsigned HOST_WIDE_INT,
134					 unsigned int));
135static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
136					 struct move_by_pieces *));
137static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
138static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
139static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
140static tree emit_block_move_libcall_fn PARAMS ((int));
141static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
142static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
143					 enum machine_mode));
144static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
145					 unsigned int));
146static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
147					 unsigned int));
148static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
149					 enum machine_mode,
150					 struct store_by_pieces *));
151static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
152static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
153static tree clear_storage_libcall_fn PARAMS ((int));
154static rtx compress_float_constant PARAMS ((rtx, rtx));
155static rtx get_subtarget	PARAMS ((rtx));
156static int is_zeros_p         PARAMS ((tree));
157static int mostly_zeros_p	PARAMS ((tree));
158static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159					     HOST_WIDE_INT, enum machine_mode,
160					     tree, tree, int, int));
161static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
162static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
163					 HOST_WIDE_INT, enum machine_mode,
164					 tree, enum machine_mode, int, tree,
165					 int));
166static rtx var_rtx		PARAMS ((tree));
167static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
169static int is_aligning_offset	PARAMS ((tree, tree));
170static rtx expand_increment	PARAMS ((tree, int, int));
171static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
172static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
173static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
174					 rtx, rtx));
175static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
176#ifdef PUSH_ROUNDING
177static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
178#endif
179static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
180static rtx const_vector_from_tree PARAMS ((tree));
181
182/* Record for each mode whether we can move a register directly to or
183   from an object of that mode in memory.  If we can't, we won't try
184   to use that mode directly when accessing a field of that mode.  */
185
186static char direct_load[NUM_MACHINE_MODES];
187static char direct_store[NUM_MACHINE_MODES];
188
189/* Record for each mode whether we can float-extend from memory.  */
190
191static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
192
193/* If a memory-to-memory move would take MOVE_RATIO or more simple
194   move-instruction sequences, we will do a movstr or libcall instead.  */
195
196#ifndef MOVE_RATIO
197#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
198#define MOVE_RATIO 2
199#else
200/* If we are optimizing for space (-Os), cut down the default move ratio.  */
201#define MOVE_RATIO (optimize_size ? 3 : 15)
202#endif
203#endif
204
205/* This macro is used to determine whether move_by_pieces should be called
206   to perform a structure copy.  */
207#ifndef MOVE_BY_PIECES_P
208#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
209  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
210#endif
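/* For example, on a typical 32-bit target with no movstr patterns,
   copying 16 well-aligned bytes takes four SImode moves; with the
   default MOVE_RATIO of 15 that copy is done by pieces, while under
   -Os (MOVE_RATIO of 3) a libcall is preferred instead.  */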
211
212/* If a clear memory operation would take CLEAR_RATIO or more simple
213   move-instruction sequences, we will do a clrstr or libcall instead.  */
214
215#ifndef CLEAR_RATIO
216#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
217#define CLEAR_RATIO 2
218#else
219/* If we are optimizing for space, cut down the default clear ratio.  */
220#define CLEAR_RATIO (optimize_size ? 3 : 15)
221#endif
222#endif
223
224/* This macro is used to determine whether clear_by_pieces should be
225   called to clear storage.  */
226#ifndef CLEAR_BY_PIECES_P
227#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
228  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
229#endif
230
231/* This array records the insn_code of insns to perform block moves.  */
232enum insn_code movstr_optab[NUM_MACHINE_MODES];
233
234/* This array records the insn_code of insns to perform block clears.  */
235enum insn_code clrstr_optab[NUM_MACHINE_MODES];
236
237/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
238
239#ifndef SLOW_UNALIGNED_ACCESS
240#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
241#endif
242
243/* This is run once per compilation to set up which modes can be used
244   directly in memory and to initialize the block move optab.  */
245
246void
247init_expr_once ()
248{
249  rtx insn, pat;
250  enum machine_mode mode;
251  int num_clobbers;
252  rtx mem, mem1;
253  rtx reg;
254
255  /* Try indexing by frame ptr and try by stack ptr.
256     It is known that on the Convex the stack ptr isn't a valid index.
257     With luck, one or the other is valid on any machine.  */
258  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
259  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
260
261  /* A scratch register we can modify in-place below to avoid
262     useless RTL allocations.  */
263  reg = gen_rtx_REG (VOIDmode, -1);
264
265  insn = rtx_alloc (INSN);
266  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
267  PATTERN (insn) = pat;
268
269  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
270       mode = (enum machine_mode) ((int) mode + 1))
271    {
272      int regno;
273
274      direct_load[(int) mode] = direct_store[(int) mode] = 0;
275      PUT_MODE (mem, mode);
276      PUT_MODE (mem1, mode);
277      PUT_MODE (reg, mode);
278
279      /* See if there is some register that can be used in this mode and
280	 directly loaded or stored from memory.  */
281
282      if (mode != VOIDmode && mode != BLKmode)
283	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
284	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
285	     regno++)
286	  {
287	    if (! HARD_REGNO_MODE_OK (regno, mode))
288	      continue;
289
290	    REGNO (reg) = regno;
291
292	    SET_SRC (pat) = mem;
293	    SET_DEST (pat) = reg;
294	    if (recog (pat, insn, &num_clobbers) >= 0)
295	      direct_load[(int) mode] = 1;
296
297	    SET_SRC (pat) = mem1;
298	    SET_DEST (pat) = reg;
299	    if (recog (pat, insn, &num_clobbers) >= 0)
300	      direct_load[(int) mode] = 1;
301
302	    SET_SRC (pat) = reg;
303	    SET_DEST (pat) = mem;
304	    if (recog (pat, insn, &num_clobbers) >= 0)
305	      direct_store[(int) mode] = 1;
306
307	    SET_SRC (pat) = reg;
308	    SET_DEST (pat) = mem1;
309	    if (recog (pat, insn, &num_clobbers) >= 0)
310	      direct_store[(int) mode] = 1;
311	  }
312    }
313
314  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
315
316  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
317       mode = GET_MODE_WIDER_MODE (mode))
318    {
319      enum machine_mode srcmode;
320      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
321	   srcmode = GET_MODE_WIDER_MODE (srcmode))
322	{
323	  enum insn_code ic;
324
325	  ic = can_extend_p (mode, srcmode, 0);
326	  if (ic == CODE_FOR_nothing)
327	    continue;
328
329	  PUT_MODE (mem, srcmode);
330
331	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
332	    float_extend_from_mem[mode][srcmode] = true;
333	}
334    }
335}
336
337/* This is run at the start of compiling a function.  */
338
339void
340init_expr ()
341{
342  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
343
344  pending_chain = 0;
345  pending_stack_adjust = 0;
346  stack_pointer_delta = 0;
347  inhibit_defer_pop = 0;
348  saveregs_value = 0;
349  apply_args_value = 0;
350  forced_labels = 0;
351}
352
353/* Small sanity check that the queue is empty at the end of a function.  */
354
355void
356finish_expr_for_function ()
357{
358  if (pending_chain)
359    abort ();
360}
361
362/* Manage the queue of increment instructions to be output
363   for POSTINCREMENT_EXPR expressions, etc.  */
364
365/* Queue up to increment (or change) VAR later.  BODY says how:
366   BODY should be the same thing you would pass to emit_insn
367   to increment right away.  It will go to emit_insn later on.
368
369   The value is a QUEUED expression to be used in place of VAR
370   where you want to guarantee the pre-incrementation value of VAR.  */
371
372static rtx
373enqueue_insn (var, body)
374     rtx var, body;
375{
376  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
377				  body, pending_chain);
378  return pending_chain;
379}
380
381/* Use protect_from_queue to convert a QUEUED expression
382   into something that you can put immediately into an instruction.
383   If the queued incrementation has not happened yet,
384   protect_from_queue returns the variable itself.
385   If the incrementation has happened, protect_from_queue returns a temp
386   that contains a copy of the old value of the variable.
387
388   Any time an rtx which might possibly be a QUEUED is to be put
389   into an instruction, it must be passed through protect_from_queue first.
390   QUEUED expressions are not meaningful in instructions.
391
392   Do not pass a value through protect_from_queue and then hold
393   on to it for a while before putting it in an instruction!
394   If the queue is flushed in between, incorrect code will result.  */
395
396rtx
397protect_from_queue (x, modify)
398     rtx x;
399     int modify;
400{
401  RTX_CODE code = GET_CODE (x);
402
403#if 0  /* A QUEUED can hang around after the queue is forced out.  */
404  /* Shortcut for most common case.  */
405  if (pending_chain == 0)
406    return x;
407#endif
408
409  if (code != QUEUED)
410    {
411      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
412	 use of autoincrement.  Make a copy of the contents of the memory
413	 location rather than a copy of the address, but not if the value is
414	 of mode BLKmode.  Don't modify X in place since it might be
415	 shared.  */
416      if (code == MEM && GET_MODE (x) != BLKmode
417	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
418	{
419	  rtx y = XEXP (x, 0);
420	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
421
422	  if (QUEUED_INSN (y))
423	    {
424	      rtx temp = gen_reg_rtx (GET_MODE (x));
425
426	      emit_insn_before (gen_move_insn (temp, new),
427				QUEUED_INSN (y));
428	      return temp;
429	    }
430
431	  /* Copy the address into a pseudo, so that the returned value
432	     remains correct across calls to emit_queue.  */
433	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
434	}
435
436      /* Otherwise, recursively protect the subexpressions of all
437	 the kinds of rtx's that can contain a QUEUED.  */
438      if (code == MEM)
439	{
440	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
441	  if (tem != XEXP (x, 0))
442	    {
443	      x = copy_rtx (x);
444	      XEXP (x, 0) = tem;
445	    }
446	}
447      else if (code == PLUS || code == MULT)
448	{
449	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
450	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
451	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
452	    {
453	      x = copy_rtx (x);
454	      XEXP (x, 0) = new0;
455	      XEXP (x, 1) = new1;
456	    }
457	}
458      return x;
459    }
460  /* If the increment has not happened, use the variable itself.  Copy it
461     into a new pseudo so that the value remains correct across calls to
462     emit_queue.  */
463  if (QUEUED_INSN (x) == 0)
464    return copy_to_reg (QUEUED_VAR (x));
465  /* If the increment has happened and a pre-increment copy exists,
466     use that copy.  */
467  if (QUEUED_COPY (x) != 0)
468    return QUEUED_COPY (x);
469  /* The increment has happened but we haven't set up a pre-increment copy.
470     Set one up now, and use it.  */
471  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
472  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
473		    QUEUED_INSN (x));
474  return QUEUED_COPY (x);
475}
476
477/* Return nonzero if X contains a QUEUED expression:
478   if it contains anything that will be altered by a queued increment.
479   We handle only combinations of MEM, PLUS, MINUS and MULT operators
480   since memory addresses generally contain only those.  */
481
482int
483queued_subexp_p (x)
484     rtx x;
485{
486  enum rtx_code code = GET_CODE (x);
487  switch (code)
488    {
489    case QUEUED:
490      return 1;
491    case MEM:
492      return queued_subexp_p (XEXP (x, 0));
493    case MULT:
494    case PLUS:
495    case MINUS:
496      return (queued_subexp_p (XEXP (x, 0))
497	      || queued_subexp_p (XEXP (x, 1)));
498    default:
499      return 0;
500    }
501}
502
503/* Perform all the pending incrementations.  */
504
505void
506emit_queue ()
507{
508  rtx p;
509  while ((p = pending_chain))
510    {
511      rtx body = QUEUED_BODY (p);
512
513      switch (GET_CODE (body))
514	{
515	case INSN:
516	case JUMP_INSN:
517	case CALL_INSN:
518	case CODE_LABEL:
519	case BARRIER:
520	case NOTE:
521	  QUEUED_INSN (p) = body;
522	  emit_insn (body);
523	  break;
524
525#ifdef ENABLE_CHECKING
526	case SEQUENCE:
527	  abort ();
528	  break;
529#endif
530
531	default:
532	  QUEUED_INSN (p) = emit_insn (body);
533	  break;
534	}
535
536      pending_chain = QUEUED_NEXT (p);
537    }
538}
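/* As an illustration of the queueing protocol above: when a
   post-increment such as "*p++" is expanded, the MEM is read through
   protect_from_queue, the increment of P is registered with
   enqueue_insn, and emit_queue later emits that increment once the
   containing statement has been expanded.  */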
539
540/* Copy data from FROM to TO, where the machine modes are not the same.
541   Both modes may be integer, or both may be floating.
542   UNSIGNEDP should be nonzero if FROM is an unsigned type.
543   This causes zero-extension instead of sign-extension.  */
544
545void
546convert_move (to, from, unsignedp)
547     rtx to, from;
548     int unsignedp;
549{
550  enum machine_mode to_mode = GET_MODE (to);
551  enum machine_mode from_mode = GET_MODE (from);
552  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
553  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
554  enum insn_code code;
555  rtx libcall;
556
557  /* rtx code for making an equivalent value.  */
558  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
559			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
560
561  to = protect_from_queue (to, 1);
562  from = protect_from_queue (from, 0);
563
564  if (to_real != from_real)
565    abort ();
566
567  /* If FROM is a SUBREG that indicates that we have already done at least
568     the required extension, strip it.  We don't handle such SUBREGs as
569     TO here.  */
570
571  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
572      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
573	  >= GET_MODE_SIZE (to_mode))
574      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
575    from = gen_lowpart (to_mode, from), from_mode = to_mode;
576
577  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
578    abort ();
579
580  if (to_mode == from_mode
581      || (from_mode == VOIDmode && CONSTANT_P (from)))
582    {
583      emit_move_insn (to, from);
584      return;
585    }
586
587  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
588    {
589      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
590	abort ();
591
592      if (VECTOR_MODE_P (to_mode))
593	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
594      else
595	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
596
597      emit_move_insn (to, from);
598      return;
599    }
600
601  if (to_real != from_real)
602    abort ();
603
604  if (to_real)
605    {
606      rtx value, insns;
607
608      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609	{
610	  /* Try converting directly if the insn is supported.  */
611	  if ((code = can_extend_p (to_mode, from_mode, 0))
612	      != CODE_FOR_nothing)
613	    {
614	      emit_unop_insn (code, to, from, UNKNOWN);
615	      return;
616	    }
617	}
618
619#ifdef HAVE_trunchfqf2
620      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
621	{
622	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
623	  return;
624	}
625#endif
626#ifdef HAVE_trunctqfqf2
627      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
628	{
629	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
630	  return;
631	}
632#endif
633#ifdef HAVE_truncsfqf2
634      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635	{
636	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637	  return;
638	}
639#endif
640#ifdef HAVE_truncdfqf2
641      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642	{
643	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644	  return;
645	}
646#endif
647#ifdef HAVE_truncxfqf2
648      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649	{
650	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651	  return;
652	}
653#endif
654#ifdef HAVE_trunctfqf2
655      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656	{
657	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658	  return;
659	}
660#endif
661
662#ifdef HAVE_trunctqfhf2
663      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664	{
665	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666	  return;
667	}
668#endif
669#ifdef HAVE_truncsfhf2
670      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671	{
672	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673	  return;
674	}
675#endif
676#ifdef HAVE_truncdfhf2
677      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678	{
679	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680	  return;
681	}
682#endif
683#ifdef HAVE_truncxfhf2
684      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685	{
686	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687	  return;
688	}
689#endif
690#ifdef HAVE_trunctfhf2
691      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692	{
693	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694	  return;
695	}
696#endif
697
698#ifdef HAVE_truncsftqf2
699      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700	{
701	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702	  return;
703	}
704#endif
705#ifdef HAVE_truncdftqf2
706      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707	{
708	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709	  return;
710	}
711#endif
712#ifdef HAVE_truncxftqf2
713      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714	{
715	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716	  return;
717	}
718#endif
719#ifdef HAVE_trunctftqf2
720      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721	{
722	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723	  return;
724	}
725#endif
726
727#ifdef HAVE_truncdfsf2
728      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729	{
730	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731	  return;
732	}
733#endif
734#ifdef HAVE_truncxfsf2
735      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736	{
737	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738	  return;
739	}
740#endif
741#ifdef HAVE_trunctfsf2
742      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743	{
744	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745	  return;
746	}
747#endif
748#ifdef HAVE_truncxfdf2
749      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750	{
751	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752	  return;
753	}
754#endif
755#ifdef HAVE_trunctfdf2
756      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757	{
758	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759	  return;
760	}
761#endif
762
763      libcall = (rtx) 0;
764      switch (from_mode)
765	{
766	case SFmode:
767	  switch (to_mode)
768	    {
769	    case DFmode:
770	      libcall = extendsfdf2_libfunc;
771	      break;
772
773	    case XFmode:
774	      libcall = extendsfxf2_libfunc;
775	      break;
776
777	    case TFmode:
778	      libcall = extendsftf2_libfunc;
779	      break;
780
781	    default:
782	      break;
783	    }
784	  break;
785
786	case DFmode:
787	  switch (to_mode)
788	    {
789	    case SFmode:
790	      libcall = truncdfsf2_libfunc;
791	      break;
792
793	    case XFmode:
794	      libcall = extenddfxf2_libfunc;
795	      break;
796
797	    case TFmode:
798	      libcall = extenddftf2_libfunc;
799	      break;
800
801	    default:
802	      break;
803	    }
804	  break;
805
806	case XFmode:
807	  switch (to_mode)
808	    {
809	    case SFmode:
810	      libcall = truncxfsf2_libfunc;
811	      break;
812
813	    case DFmode:
814	      libcall = truncxfdf2_libfunc;
815	      break;
816
817	    default:
818	      break;
819	    }
820	  break;
821
822	case TFmode:
823	  switch (to_mode)
824	    {
825	    case SFmode:
826	      libcall = trunctfsf2_libfunc;
827	      break;
828
829	    case DFmode:
830	      libcall = trunctfdf2_libfunc;
831	      break;
832
833	    default:
834	      break;
835	    }
836	  break;
837
838	default:
839	  break;
840	}
841
842      if (libcall == (rtx) 0)
843	/* This conversion is not implemented yet.  */
844	abort ();
845
846      start_sequence ();
847      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
848				       1, from, from_mode);
849      insns = get_insns ();
850      end_sequence ();
851      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
852								    from));
853      return;
854    }
855
856  /* Now both modes are integers.  */
857
858  /* Handle expanding beyond a word.  */
859  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
860      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
861    {
862      rtx insns;
863      rtx lowpart;
864      rtx fill_value;
865      rtx lowfrom;
866      int i;
867      enum machine_mode lowpart_mode;
868      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
869
870      /* Try converting directly if the insn is supported.  */
871      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
872	  != CODE_FOR_nothing)
873	{
874	  /* If FROM is a SUBREG, put it into a register.  Do this
875	     so that we always generate the same set of insns for
876	     better cse'ing; if an intermediate assignment occurred,
877	     we won't be doing the operation directly on the SUBREG.  */
878	  if (optimize > 0 && GET_CODE (from) == SUBREG)
879	    from = force_reg (from_mode, from);
880	  emit_unop_insn (code, to, from, equiv_code);
881	  return;
882	}
883      /* Next, try converting via full word.  */
884      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
885	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
886		   != CODE_FOR_nothing))
887	{
888	  if (GET_CODE (to) == REG)
889	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
890	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
891	  emit_unop_insn (code, to,
892			  gen_lowpart (word_mode, to), equiv_code);
893	  return;
894	}
895
896      /* No special multiword conversion insn; do it by hand.  */
897      start_sequence ();
898
899      /* Since we will turn this into a no conflict block, we must ensure
900	 that the source does not overlap the target.  */
901
902      if (reg_overlap_mentioned_p (to, from))
903	from = force_reg (from_mode, from);
904
905      /* Get a copy of FROM widened to a word, if necessary.  */
906      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
907	lowpart_mode = word_mode;
908      else
909	lowpart_mode = from_mode;
910
911      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
912
913      lowpart = gen_lowpart (lowpart_mode, to);
914      emit_move_insn (lowpart, lowfrom);
915
916      /* Compute the value to put in each remaining word.  */
917      if (unsignedp)
918	fill_value = const0_rtx;
919      else
920	{
921#ifdef HAVE_slt
922	  if (HAVE_slt
923	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
924	      && STORE_FLAG_VALUE == -1)
925	    {
926	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
927			     lowpart_mode, 0);
928	      fill_value = gen_reg_rtx (word_mode);
929	      emit_insn (gen_slt (fill_value));
930	    }
931	  else
932#endif
933	    {
934	      fill_value
935		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
936				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
937				NULL_RTX, 0);
938	      fill_value = convert_to_mode (word_mode, fill_value, 1);
939	    }
940	}
941
942      /* Fill the remaining words.  */
943      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
944	{
945	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
946	  rtx subword = operand_subword (to, index, 1, to_mode);
947
948	  if (subword == 0)
949	    abort ();
950
951	  if (fill_value != subword)
952	    emit_move_insn (subword, fill_value);
953	}
954
955      insns = get_insns ();
956      end_sequence ();
957
958      emit_no_conflict_block (insns, to, from, NULL_RTX,
959			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
960      return;
961    }
962
963  /* Truncating multi-word to a word or less.  */
964  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
965      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
966    {
967      if (!((GET_CODE (from) == MEM
968	     && ! MEM_VOLATILE_P (from)
969	     && direct_load[(int) to_mode]
970	     && ! mode_dependent_address_p (XEXP (from, 0)))
971	    || GET_CODE (from) == REG
972	    || GET_CODE (from) == SUBREG))
973	from = force_reg (from_mode, from);
974      convert_move (to, gen_lowpart (word_mode, from), 0);
975      return;
976    }
977
978  /* Handle pointer conversion.  */			/* SPEE 900220.  */
979  if (to_mode == PQImode)
980    {
981      if (from_mode != QImode)
982	from = convert_to_mode (QImode, from, unsignedp);
983
984#ifdef HAVE_truncqipqi2
985      if (HAVE_truncqipqi2)
986	{
987	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
988	  return;
989	}
990#endif /* HAVE_truncqipqi2 */
991      abort ();
992    }
993
994  if (from_mode == PQImode)
995    {
996      if (to_mode != QImode)
997	{
998	  from = convert_to_mode (QImode, from, unsignedp);
999	  from_mode = QImode;
1000	}
1001      else
1002	{
1003#ifdef HAVE_extendpqiqi2
1004	  if (HAVE_extendpqiqi2)
1005	    {
1006	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1007	      return;
1008	    }
1009#endif /* HAVE_extendpqiqi2 */
1010	  abort ();
1011	}
1012    }
1013
1014  if (to_mode == PSImode)
1015    {
1016      if (from_mode != SImode)
1017	from = convert_to_mode (SImode, from, unsignedp);
1018
1019#ifdef HAVE_truncsipsi2
1020      if (HAVE_truncsipsi2)
1021	{
1022	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1023	  return;
1024	}
1025#endif /* HAVE_truncsipsi2 */
1026      abort ();
1027    }
1028
1029  if (from_mode == PSImode)
1030    {
1031      if (to_mode != SImode)
1032	{
1033	  from = convert_to_mode (SImode, from, unsignedp);
1034	  from_mode = SImode;
1035	}
1036      else
1037	{
1038#ifdef HAVE_extendpsisi2
1039	  if (! unsignedp && HAVE_extendpsisi2)
1040	    {
1041	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1042	      return;
1043	    }
1044#endif /* HAVE_extendpsisi2 */
1045#ifdef HAVE_zero_extendpsisi2
1046	  if (unsignedp && HAVE_zero_extendpsisi2)
1047	    {
1048	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1049	      return;
1050	    }
1051#endif /* HAVE_zero_extendpsisi2 */
1052	  abort ();
1053	}
1054    }
1055
1056  if (to_mode == PDImode)
1057    {
1058      if (from_mode != DImode)
1059	from = convert_to_mode (DImode, from, unsignedp);
1060
1061#ifdef HAVE_truncdipdi2
1062      if (HAVE_truncdipdi2)
1063	{
1064	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1065	  return;
1066	}
1067#endif /* HAVE_truncdipdi2 */
1068      abort ();
1069    }
1070
1071  if (from_mode == PDImode)
1072    {
1073      if (to_mode != DImode)
1074	{
1075	  from = convert_to_mode (DImode, from, unsignedp);
1076	  from_mode = DImode;
1077	}
1078      else
1079	{
1080#ifdef HAVE_extendpdidi2
1081	  if (HAVE_extendpdidi2)
1082	    {
1083	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1084	      return;
1085	    }
1086#endif /* HAVE_extendpdidi2 */
1087	  abort ();
1088	}
1089    }
1090
1091  /* Now follow all the conversions between integers
1092     whose modes are no more than a word long.  */
1093
1094  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1095  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1096      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1097				GET_MODE_BITSIZE (from_mode)))
1098    {
1099      if (!((GET_CODE (from) == MEM
1100	     && ! MEM_VOLATILE_P (from)
1101	     && direct_load[(int) to_mode]
1102	     && ! mode_dependent_address_p (XEXP (from, 0)))
1103	    || GET_CODE (from) == REG
1104	    || GET_CODE (from) == SUBREG))
1105	from = force_reg (from_mode, from);
1106      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1107	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1108	from = copy_to_reg (from);
1109      emit_move_insn (to, gen_lowpart (to_mode, from));
1110      return;
1111    }
1112
1113  /* Handle extension.  */
1114  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1115    {
1116      /* Convert directly if that works.  */
1117      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1118	  != CODE_FOR_nothing)
1119	{
1120	  if (flag_force_mem)
1121	    from = force_not_mem (from);
1122
1123	  emit_unop_insn (code, to, from, equiv_code);
1124	  return;
1125	}
1126      else
1127	{
1128	  enum machine_mode intermediate;
1129	  rtx tmp;
1130	  tree shift_amount;
1131
1132	  /* Search for a mode to convert via.  */
1133	  for (intermediate = from_mode; intermediate != VOIDmode;
1134	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1135	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1136		  != CODE_FOR_nothing)
1137		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1138		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1139					       GET_MODE_BITSIZE (intermediate))))
1140		&& (can_extend_p (intermediate, from_mode, unsignedp)
1141		    != CODE_FOR_nothing))
1142	      {
1143		convert_move (to, convert_to_mode (intermediate, from,
1144						   unsignedp), unsignedp);
1145		return;
1146	      }
1147
1148	  /* No suitable intermediate mode.
1149	     Generate what we need with shifts.  */
1150	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1151				      - GET_MODE_BITSIZE (from_mode), 0);
1152	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1153	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1154			      to, unsignedp);
1155	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1156			      to, unsignedp);
1157	  if (tmp != to)
1158	    emit_move_insn (to, tmp);
1159	  return;
1160	}
1161    }
1162
1163  /* Support special truncate insns for certain modes.  */
1164
1165  if (from_mode == DImode && to_mode == SImode)
1166    {
1167#ifdef HAVE_truncdisi2
1168      if (HAVE_truncdisi2)
1169	{
1170	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1171	  return;
1172	}
1173#endif
1174      convert_move (to, force_reg (from_mode, from), unsignedp);
1175      return;
1176    }
1177
1178  if (from_mode == DImode && to_mode == HImode)
1179    {
1180#ifdef HAVE_truncdihi2
1181      if (HAVE_truncdihi2)
1182	{
1183	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1184	  return;
1185	}
1186#endif
1187      convert_move (to, force_reg (from_mode, from), unsignedp);
1188      return;
1189    }
1190
1191  if (from_mode == DImode && to_mode == QImode)
1192    {
1193#ifdef HAVE_truncdiqi2
1194      if (HAVE_truncdiqi2)
1195	{
1196	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1197	  return;
1198	}
1199#endif
1200      convert_move (to, force_reg (from_mode, from), unsignedp);
1201      return;
1202    }
1203
1204  if (from_mode == SImode && to_mode == HImode)
1205    {
1206#ifdef HAVE_truncsihi2
1207      if (HAVE_truncsihi2)
1208	{
1209	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1210	  return;
1211	}
1212#endif
1213      convert_move (to, force_reg (from_mode, from), unsignedp);
1214      return;
1215    }
1216
1217  if (from_mode == SImode && to_mode == QImode)
1218    {
1219#ifdef HAVE_truncsiqi2
1220      if (HAVE_truncsiqi2)
1221	{
1222	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1223	  return;
1224	}
1225#endif
1226      convert_move (to, force_reg (from_mode, from), unsignedp);
1227      return;
1228    }
1229
1230  if (from_mode == HImode && to_mode == QImode)
1231    {
1232#ifdef HAVE_trunchiqi2
1233      if (HAVE_trunchiqi2)
1234	{
1235	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1236	  return;
1237	}
1238#endif
1239      convert_move (to, force_reg (from_mode, from), unsignedp);
1240      return;
1241    }
1242
1243  if (from_mode == TImode && to_mode == DImode)
1244    {
1245#ifdef HAVE_trunctidi2
1246      if (HAVE_trunctidi2)
1247	{
1248	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1249	  return;
1250	}
1251#endif
1252      convert_move (to, force_reg (from_mode, from), unsignedp);
1253      return;
1254    }
1255
1256  if (from_mode == TImode && to_mode == SImode)
1257    {
1258#ifdef HAVE_trunctisi2
1259      if (HAVE_trunctisi2)
1260	{
1261	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1262	  return;
1263	}
1264#endif
1265      convert_move (to, force_reg (from_mode, from), unsignedp);
1266      return;
1267    }
1268
1269  if (from_mode == TImode && to_mode == HImode)
1270    {
1271#ifdef HAVE_trunctihi2
1272      if (HAVE_trunctihi2)
1273	{
1274	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1275	  return;
1276	}
1277#endif
1278      convert_move (to, force_reg (from_mode, from), unsignedp);
1279      return;
1280    }
1281
1282  if (from_mode == TImode && to_mode == QImode)
1283    {
1284#ifdef HAVE_trunctiqi2
1285      if (HAVE_trunctiqi2)
1286	{
1287	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1288	  return;
1289	}
1290#endif
1291      convert_move (to, force_reg (from_mode, from), unsignedp);
1292      return;
1293    }
1294
1295  /* Handle truncation of volatile memrefs, and so on;
1296     the things that couldn't be truncated directly,
1297     and for which there was no special instruction.  */
1298  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1299    {
1300      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1301      emit_move_insn (to, temp);
1302      return;
1303    }
1304
1305  /* Mode combination is not recognized.  */
1306  abort ();
1307}
1308
1309/* Return an rtx for a value that would result
1310   from converting X to mode MODE.
1311   Both X and MODE may be floating, or both integer.
1312   UNSIGNEDP is nonzero if X is an unsigned value.
1313   This can be done by referring to a part of X in place
1314   or by copying to a new temporary with conversion.
1315
1316   This function *must not* call protect_from_queue
1317   except when putting X into an insn (in which case convert_move does it).  */
1318
1319rtx
1320convert_to_mode (mode, x, unsignedp)
1321     enum machine_mode mode;
1322     rtx x;
1323     int unsignedp;
1324{
1325  return convert_modes (mode, VOIDmode, x, unsignedp);
1326}
1327
1328/* Return an rtx for a value that would result
1329   from converting X from mode OLDMODE to mode MODE.
1330   Both modes may be floating, or both integer.
1331   UNSIGNEDP is nonzero if X is an unsigned value.
1332
1333   This can be done by referring to a part of X in place
1334   or by copying to a new temporary with conversion.
1335
1336   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1337
1338   This function *must not* call protect_from_queue
1339   except when putting X into an insn (in which case convert_move does it).  */
1340
1341rtx
1342convert_modes (mode, oldmode, x, unsignedp)
1343     enum machine_mode mode, oldmode;
1344     rtx x;
1345     int unsignedp;
1346{
1347  rtx temp;
1348
1349  /* If FROM is a SUBREG that indicates that we have already done at least
1350     the required extension, strip it.  */
1351
1352  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1353      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1354      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1355    x = gen_lowpart (mode, x);
1356
1357  if (GET_MODE (x) != VOIDmode)
1358    oldmode = GET_MODE (x);
1359
1360  if (mode == oldmode)
1361    return x;
1362
1363  /* There is one case that we must handle specially: If we are converting
1364     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1365     we are to interpret the constant as unsigned, gen_lowpart will do
1366     the wrong if the constant appears negative.  What we want to do is
1367     make the high-order word of the constant zero, not all ones.  */
1368
1369  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1370      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1371      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1372    {
1373      HOST_WIDE_INT val = INTVAL (x);
1374
1375      if (oldmode != VOIDmode
1376	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1377	{
1378	  int width = GET_MODE_BITSIZE (oldmode);
1379
1380	  /* We need to zero extend VAL.  */
1381	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1382	}
1383
1384      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1385    }
1386
1387  /* We can do this with a gen_lowpart if both desired and current modes
1388     are integer, and this is either a constant integer, a register, or a
1389     non-volatile MEM.  Except for the constant case where MODE is no
1390     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1391
1392  if ((GET_CODE (x) == CONST_INT
1393       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1394      || (GET_MODE_CLASS (mode) == MODE_INT
1395	  && GET_MODE_CLASS (oldmode) == MODE_INT
1396	  && (GET_CODE (x) == CONST_DOUBLE
1397	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1398		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1399		       && direct_load[(int) mode])
1400		      || (GET_CODE (x) == REG
1401			  && (! HARD_REGISTER_P (x)
1402			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
1403			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1404						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1405    {
1406      /* ?? If we don't know OLDMODE, we have to assume here that
1407	 X does not need sign- or zero-extension.   This may not be
1408	 the case, but it's the best we can do.  */
1409      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1410	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1411	{
1412	  HOST_WIDE_INT val = INTVAL (x);
1413	  int width = GET_MODE_BITSIZE (oldmode);
1414
1415	  /* We must sign or zero-extend in this case.  Start by
1416	     zero-extending, then sign extend if we need to.  */
1417	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1418	  if (! unsignedp
1419	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1420	    val |= (HOST_WIDE_INT) (-1) << width;
1421
1422	  return gen_int_mode (val, mode);
1423	}
1424
1425      return gen_lowpart (mode, x);
1426    }
1427
1428  /* Converting an integer constant into MODE is always equivalent to a
1429     subreg operation.  */
1430  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1431    {
1432      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1433	abort ();
1434      return simplify_gen_subreg (mode, x, oldmode, 0);
1435    }
1436
1437  temp = gen_reg_rtx (mode);
1438  convert_move (temp, x, unsignedp);
1439  return temp;
1440}
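/* A sketch of typical use of the conversion routines above: widen a
   QImode value OP0 to SImode with zero extension before operating
   on it:

     op0 = convert_modes (SImode, QImode, op0, 1);

   The result may be OP0 itself, a constant, or a fresh pseudo, so
   callers must use the returned rtx rather than assume an in-place
   conversion.  */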
1441
1442/* This macro determines the largest unit size that move_by_pieces
1443   can use.  */
1444
1445/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1446   move efficiently, as opposed to MOVE_MAX, which is the maximum
1447   number of bytes we can move with a single instruction.  */
1448
1449#ifndef MOVE_MAX_PIECES
1450#define MOVE_MAX_PIECES   MOVE_MAX
1451#endif
1452
1453/* STORE_MAX_PIECES is the number of bytes at a time that we can
1454   store efficiently.  Due to internal GCC limitations, this is
1455   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1456   for an immediate constant.  */
1457
1458#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
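/* For instance, with a 64-bit HOST_WIDE_INT this works out to
   MIN (MOVE_MAX_PIECES, 16), so on most hosts the limit is simply
   MOVE_MAX_PIECES; only a narrower HOST_WIDE_INT constrains it
   further.  */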
1459
1460/* Generate several move instructions to copy LEN bytes from block FROM to
1461   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1462   and TO through protect_from_queue before calling.
1463
1464   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1465   used to push FROM to the stack.
1466
1467   ALIGN is maximum alignment we can assume.  */
1468
1469void
1470move_by_pieces (to, from, len, align)
1471     rtx to, from;
1472     unsigned HOST_WIDE_INT len;
1473     unsigned int align;
1474{
1475  struct move_by_pieces data;
1476  rtx to_addr, from_addr = XEXP (from, 0);
1477  unsigned int max_size = MOVE_MAX_PIECES + 1;
1478  enum machine_mode mode = VOIDmode, tmode;
1479  enum insn_code icode;
1480
1481  data.offset = 0;
1482  data.from_addr = from_addr;
1483  if (to)
1484    {
1485      to_addr = XEXP (to, 0);
1486      data.to = to;
1487      data.autinc_to
1488	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1489	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1490      data.reverse
1491	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1492    }
1493  else
1494    {
1495      to_addr = NULL_RTX;
1496      data.to = NULL_RTX;
1497      data.autinc_to = 1;
1498#ifdef STACK_GROWS_DOWNWARD
1499      data.reverse = 1;
1500#else
1501      data.reverse = 0;
1502#endif
1503    }
1504  data.to_addr = to_addr;
1505  data.from = from;
1506  data.autinc_from
1507    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1508       || GET_CODE (from_addr) == POST_INC
1509       || GET_CODE (from_addr) == POST_DEC);
1510
1511  data.explicit_inc_from = 0;
1512  data.explicit_inc_to = 0;
1513  if (data.reverse) data.offset = len;
1514  data.len = len;
1515
1516  /* If copying requires more than two move insns,
1517     copy addresses to registers (to make displacements shorter)
1518     and use post-increment if available.  */
1519  if (!(data.autinc_from && data.autinc_to)
1520      && move_by_pieces_ninsns (len, align) > 2)
1521    {
1522      /* Find the mode of the largest move...  */
1523      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1524	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1525	if (GET_MODE_SIZE (tmode) < max_size)
1526	  mode = tmode;
1527
1528      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1529	{
1530	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1531	  data.autinc_from = 1;
1532	  data.explicit_inc_from = -1;
1533	}
1534      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1535	{
1536	  data.from_addr = copy_addr_to_reg (from_addr);
1537	  data.autinc_from = 1;
1538	  data.explicit_inc_from = 1;
1539	}
1540      if (!data.autinc_from && CONSTANT_P (from_addr))
1541	data.from_addr = copy_addr_to_reg (from_addr);
1542      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1543	{
1544	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1545	  data.autinc_to = 1;
1546	  data.explicit_inc_to = -1;
1547	}
1548      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1549	{
1550	  data.to_addr = copy_addr_to_reg (to_addr);
1551	  data.autinc_to = 1;
1552	  data.explicit_inc_to = 1;
1553	}
1554      if (!data.autinc_to && CONSTANT_P (to_addr))
1555	data.to_addr = copy_addr_to_reg (to_addr);
1556    }
1557
1558  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1559      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1560    align = MOVE_MAX * BITS_PER_UNIT;
1561
1562  /* First move what we can in the largest integer mode, then go to
1563     successively smaller modes.  */
1564
1565  while (max_size > 1)
1566    {
1567      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1568	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1569	if (GET_MODE_SIZE (tmode) < max_size)
1570	  mode = tmode;
1571
1572      if (mode == VOIDmode)
1573	break;
1574
1575      icode = mov_optab->handlers[(int) mode].insn_code;
1576      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1577	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1578
1579      max_size = GET_MODE_SIZE (mode);
1580    }
1581
1582  /* The code above should have handled everything.  */
1583  if (data.len > 0)
1584    abort ();
1585}
1586
1587/* Return number of insns required to move L bytes by pieces.
1588   ALIGN (in bits) is maximum alignment we can assume.  */
1589
1590static unsigned HOST_WIDE_INT
1591move_by_pieces_ninsns (l, align)
1592     unsigned HOST_WIDE_INT l;
1593     unsigned int align;
1594{
1595  unsigned HOST_WIDE_INT n_insns = 0;
1596  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1597
1598  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1599      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1600    align = MOVE_MAX * BITS_PER_UNIT;
1601
1602  while (max_size > 1)
1603    {
1604      enum machine_mode mode = VOIDmode, tmode;
1605      enum insn_code icode;
1606
1607      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1608	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1609	if (GET_MODE_SIZE (tmode) < max_size)
1610	  mode = tmode;
1611
1612      if (mode == VOIDmode)
1613	break;
1614
1615      icode = mov_optab->handlers[(int) mode].insn_code;
1616      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1617	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1618
1619      max_size = GET_MODE_SIZE (mode);
1620    }
1621
1622  if (l)
1623    abort ();
1624  return n_insns;
1625}
1626
1627/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1628   with move instructions for mode MODE.  GENFUN is the gen_... function
1629   to make a move insn for that mode.  DATA has all the other info.  */
1630
1631static void
1632move_by_pieces_1 (genfun, mode, data)
1633     rtx (*genfun) PARAMS ((rtx, ...));
1634     enum machine_mode mode;
1635     struct move_by_pieces *data;
1636{
1637  unsigned int size = GET_MODE_SIZE (mode);
1638  rtx to1 = NULL_RTX, from1;
1639
1640  while (data->len >= size)
1641    {
1642      if (data->reverse)
1643	data->offset -= size;
1644
1645      if (data->to)
1646	{
1647	  if (data->autinc_to)
1648	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1649					     data->offset);
1650	  else
1651	    to1 = adjust_address (data->to, mode, data->offset);
1652	}
1653
1654      if (data->autinc_from)
1655	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1656					   data->offset);
1657      else
1658	from1 = adjust_address (data->from, mode, data->offset);
1659
1660      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1661	emit_insn (gen_add2_insn (data->to_addr,
1662				  GEN_INT (-(HOST_WIDE_INT)size)));
1663      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1664	emit_insn (gen_add2_insn (data->from_addr,
1665				  GEN_INT (-(HOST_WIDE_INT)size)));
1666
1667      if (data->to)
1668	emit_insn ((*genfun) (to1, from1));
1669      else
1670	{
1671#ifdef PUSH_ROUNDING
1672	  emit_single_push_insn (mode, from1, NULL);
1673#else
1674	  abort ();
1675#endif
1676	}
1677
1678      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1679	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1680      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1681	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1682
1683      if (! data->reverse)
1684	data->offset += size;
1685
1686      data->len -= size;
1687    }
1688}
1689
1690/* Emit code to move a block Y to a block X.  This may be done with
1691   string-move instructions, with multiple scalar move instructions,
1692   or with a library call.
1693
1694   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1695   SIZE is an rtx that says how long they are.
1696   ALIGN is the maximum alignment we can assume they have.
1697   METHOD describes what kind of copy this is, and what mechanisms may be used.
1698
1699   Return the address of the new block, if memcpy is called and returns it,
1700   0 otherwise.  */
1701
1702rtx
1703emit_block_move (x, y, size, method)
1704     rtx x, y, size;
1705     enum block_op_methods method;
1706{
1707  bool may_use_call;
1708  rtx retval = 0;
1709  unsigned int align;
1710
1711  switch (method)
1712    {
1713    case BLOCK_OP_NORMAL:
1714      may_use_call = true;
1715      break;
1716
1717    case BLOCK_OP_CALL_PARM:
1718      may_use_call = block_move_libcall_safe_for_call_parm ();
1719
1720      /* Make inhibit_defer_pop nonzero around the library call
1721	 to force it to pop the arguments right away.  */
1722      NO_DEFER_POP;
1723      break;
1724
1725    case BLOCK_OP_NO_LIBCALL:
1726      may_use_call = false;
1727      break;
1728
1729    default:
1730      abort ();
1731    }
1732
1733  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1734
1735  if (GET_MODE (x) != BLKmode)
1736    abort ();
1737  if (GET_MODE (y) != BLKmode)
1738    abort ();
1739
1740  x = protect_from_queue (x, 1);
1741  y = protect_from_queue (y, 0);
1742  size = protect_from_queue (size, 0);
1743
1744  if (GET_CODE (x) != MEM)
1745    abort ();
1746  if (GET_CODE (y) != MEM)
1747    abort ();
1748  if (size == 0)
1749    abort ();
1750
1751  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1752     can be incorrect is coming from __builtin_memcpy.  */
1753  if (GET_CODE (size) == CONST_INT)
1754    {
1755      x = shallow_copy_rtx (x);
1756      y = shallow_copy_rtx (y);
1757      set_mem_size (x, size);
1758      set_mem_size (y, size);
1759    }
1760
1761  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1762    move_by_pieces (x, y, INTVAL (size), align);
1763  else if (emit_block_move_via_movstr (x, y, size, align))
1764    ;
1765  else if (may_use_call)
1766    retval = emit_block_move_via_libcall (x, y, size);
1767  else
1768    emit_block_move_via_loop (x, y, size, align);
1769
1770  if (method == BLOCK_OP_CALL_PARM)
1771    OK_DEFER_POP;
1772
1773  return retval;
1774}
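/* A minimal example of the interface above, assuming X and Y are
   BLKmode MEMs describing 32-byte objects:

     emit_block_move (x, y, GEN_INT (32), BLOCK_OP_NORMAL);

   The copy is done by pieces, via a movstr pattern, or through a
   library call, whichever the heuristics above select.  */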
1775
1776/* A subroutine of emit_block_move.  Returns true if calling the
1777   block move libcall will not clobber any parameters which may have
1778   already been placed on the stack.  */
1779
1780static bool
1781block_move_libcall_safe_for_call_parm ()
1782{
1783  if (PUSH_ARGS)
1784    return true;
1785  else
1786    {
1787      /* Check to see whether memcpy takes all register arguments.  */
1788      static enum {
1789	takes_regs_uninit, takes_regs_no, takes_regs_yes
1790      } takes_regs = takes_regs_uninit;
1791
1792      switch (takes_regs)
1793	{
1794	case takes_regs_uninit:
1795	  {
1796	    CUMULATIVE_ARGS args_so_far;
1797	    tree fn, arg;
1798
1799	    fn = emit_block_move_libcall_fn (false);
1800	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1801
1802	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1803	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1804	      {
1805		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1806		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1807		if (!tmp || !REG_P (tmp))
1808		  goto fail_takes_regs;
1809#ifdef FUNCTION_ARG_PARTIAL_NREGS
1810		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1811						NULL_TREE, 1))
1812		  goto fail_takes_regs;
1813#endif
1814		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1815	      }
1816	  }
1817	  takes_regs = takes_regs_yes;
1818	  /* FALLTHRU */
1819
1820	case takes_regs_yes:
1821	  return true;
1822
1823	fail_takes_regs:
1824	  takes_regs = takes_regs_no;
1825	  /* FALLTHRU */
1826	case takes_regs_no:
1827	  return false;
1828
1829	default:
1830	  abort ();
1831	}
1832    }
1833}
1834
1835/* A subroutine of emit_block_move.  Expand a movstr pattern;
1836   return true if successful.  */
1837
1838static bool
1839emit_block_move_via_movstr (x, y, size, align)
1840     rtx x, y, size;
1841     unsigned int align;
1842{
1843  /* Try the most limited insn first, because there's no point
1844     including more than one in the machine description unless
1845     the more limited one has some advantage.  */
1846
1847  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1848  enum machine_mode mode;
1849
1850  /* Since this is a move insn, we don't care about volatility.  */
1851  volatile_ok = 1;
1852
1853  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1854       mode = GET_MODE_WIDER_MODE (mode))
1855    {
1856      enum insn_code code = movstr_optab[(int) mode];
1857      insn_operand_predicate_fn pred;
1858
1859      if (code != CODE_FOR_nothing
1860	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1861	     here because if SIZE is less than the mode mask, as it is
1862	     returned by the macro, it will definitely be less than the
1863	     actual mode mask.  */
1864	  && ((GET_CODE (size) == CONST_INT
1865	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1866		   <= (GET_MODE_MASK (mode) >> 1)))
1867	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1868	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1869	      || (*pred) (x, BLKmode))
1870	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1871	      || (*pred) (y, BLKmode))
1872	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1873	      || (*pred) (opalign, VOIDmode)))
1874	{
1875	  rtx op2;
1876	  rtx last = get_last_insn ();
1877	  rtx pat;
1878
1879	  op2 = convert_to_mode (mode, size, 1);
1880	  pred = insn_data[(int) code].operand[2].predicate;
1881	  if (pred != 0 && ! (*pred) (op2, mode))
1882	    op2 = copy_to_mode_reg (mode, op2);
1883
1884	  /* ??? When called via emit_block_move_for_call, it'd be
1885	     nice if there were some way to inform the backend, so
1886	     that it doesn't fail the expansion because it thinks
1887	     emitting the libcall would be more efficient.  */
1888
1889	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1890	  if (pat)
1891	    {
1892	      emit_insn (pat);
1893	      volatile_ok = 0;
1894	      return true;
1895	    }
1896	  else
1897	    delete_insns_since (last);
1898	}
1899    }
1900
1901  volatile_ok = 0;
1902  return false;
1903}
1904
1905/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
1906   Return the return value from memcpy, 0 otherwise.  */
1907
1908static rtx
1909emit_block_move_via_libcall (dst, src, size)
1910     rtx dst, src, size;
1911{
1912  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1913  enum machine_mode size_mode;
1914  rtx retval;
1915
1916  /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1917
1918     It is unsafe to save the value generated by protect_from_queue
1919     and reuse it later.  Consider what happens if emit_queue is
1920     called before the return value from protect_from_queue is used.
1921
1922     Expansion of the CALL_EXPR below will call emit_queue before
1923     we are finished emitting RTL for argument setup.  So if we are
1924     not careful we could get the wrong value for an argument.
1925
1926     To avoid this problem we go ahead and emit code to copy X, Y &
1927     SIZE into new pseudos.  We can then place those new pseudos
1928     into an RTL_EXPR and use them later, even after a call to
1929     emit_queue.
1930
1931     Note this is not strictly needed for library calls since they
1932     do not call emit_queue before loading their arguments.  However,
1933     we may need to have library calls call emit_queue in the future
1934     since failing to do so could cause problems for targets which
1935     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1936
1937  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1938  src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1939
1940  if (TARGET_MEM_FUNCTIONS)
1941    size_mode = TYPE_MODE (sizetype);
1942  else
1943    size_mode = TYPE_MODE (unsigned_type_node);
1944  size = convert_to_mode (size_mode, size, 1);
1945  size = copy_to_mode_reg (size_mode, size);
1946
1947  /* It is incorrect to use the libcall calling conventions to call
1948     memcpy in this context.  This could be a user call to memcpy and
1949     the user may wish to examine the return value from memcpy.  For
1950     targets where libcalls and normal calls have different conventions
1951     for returning pointers, we could end up generating incorrect code.
1952
1953     For convenience, we generate the call to bcopy this way as well.  */
1954
1955  dst_tree = make_tree (ptr_type_node, dst);
1956  src_tree = make_tree (ptr_type_node, src);
1957  if (TARGET_MEM_FUNCTIONS)
1958    size_tree = make_tree (sizetype, size);
1959  else
1960    size_tree = make_tree (unsigned_type_node, size);
1961
1962  fn = emit_block_move_libcall_fn (true);
1963  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1964  if (TARGET_MEM_FUNCTIONS)
1965    {
1966      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1967      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1968    }
1969  else
1970    {
1971      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1972      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1973    }
1974
1975  /* Now we have to build up the CALL_EXPR itself.  */
1976  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1977  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1978		     call_expr, arg_list, NULL_TREE);
1979  TREE_SIDE_EFFECTS (call_expr) = 1;
1980
1981  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1982
1983  /* If we are initializing a readonly value, show the above call
1984     clobbered it.  Otherwise, a load from it may erroneously be
1985     hoisted from a loop.  */
1986  if (RTX_UNCHANGING_P (dst))
1987    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1988
1989  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1990}
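/* Editorial note: memcpy takes (dst, src, n) and returns dst, whereas
   bcopy takes (src, dst, n) and returns void; that is why the argument
   list above is consed in opposite orders for the two cases and why only
   the TARGET_MEM_FUNCTIONS (memcpy) path propagates a return value.  */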
1991
1992/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1993   for the function we use for block copies.  The first time FOR_CALL
1994   is true, we call assemble_external.  */
1995
1996static GTY(()) tree block_move_fn;
1997
1998static tree
1999emit_block_move_libcall_fn (for_call)
2000      int for_call;
2001{
2002  static bool emitted_extern;
2003  tree fn = block_move_fn, args;
2004
2005  if (!fn)
2006    {
2007      if (TARGET_MEM_FUNCTIONS)
2008	{
2009	  fn = get_identifier ("memcpy");
2010	  args = build_function_type_list (ptr_type_node, ptr_type_node,
2011					   const_ptr_type_node, sizetype,
2012					   NULL_TREE);
2013	}
2014      else
2015	{
2016	  fn = get_identifier ("bcopy");
2017	  args = build_function_type_list (void_type_node, const_ptr_type_node,
2018					   ptr_type_node, unsigned_type_node,
2019					   NULL_TREE);
2020	}
2021
2022      fn = build_decl (FUNCTION_DECL, fn, args);
2023      DECL_EXTERNAL (fn) = 1;
2024      TREE_PUBLIC (fn) = 1;
2025      DECL_ARTIFICIAL (fn) = 1;
2026      TREE_NOTHROW (fn) = 1;
2027
2028      block_move_fn = fn;
2029    }
2030
2031  if (for_call && !emitted_extern)
2032    {
2033      emitted_extern = true;
2034      make_decl_rtl (fn, NULL);
2035      assemble_external (fn);
2036    }
2037
2038  return fn;
2039}
2040
2041/* A subroutine of emit_block_move.  Copy the data via an explicit
2042   loop.  This is used only when libcalls are forbidden.  */
2043/* ??? It'd be nice to copy in hunks larger than QImode.  */
2044
2045static void
2046emit_block_move_via_loop (x, y, size, align)
2047     rtx x, y, size;
2048     unsigned int align ATTRIBUTE_UNUSED;
2049{
2050  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2051  enum machine_mode iter_mode;
2052
2053  iter_mode = GET_MODE (size);
2054  if (iter_mode == VOIDmode)
2055    iter_mode = word_mode;
2056
2057  top_label = gen_label_rtx ();
2058  cmp_label = gen_label_rtx ();
2059  iter = gen_reg_rtx (iter_mode);
2060
2061  emit_move_insn (iter, const0_rtx);
2062
2063  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2064  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2065  do_pending_stack_adjust ();
2066
2067  emit_note (NULL, NOTE_INSN_LOOP_BEG);
2068
2069  emit_jump (cmp_label);
2070  emit_label (top_label);
2071
2072  tmp = convert_modes (Pmode, iter_mode, iter, true);
2073  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2074  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2075  x = change_address (x, QImode, x_addr);
2076  y = change_address (y, QImode, y_addr);
2077
2078  emit_move_insn (x, y);
2079
2080  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2081			     true, OPTAB_LIB_WIDEN);
2082  if (tmp != iter)
2083    emit_move_insn (iter, tmp);
2084
2085  emit_note (NULL, NOTE_INSN_LOOP_CONT);
2086  emit_label (cmp_label);
2087
2088  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2089			   true, top_label);
2090
2091  emit_note (NULL, NOTE_INSN_LOOP_END);
2092}
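/* Editorial sketch: the RTL emitted by emit_block_move_via_loop above
   corresponds roughly to the following byte-at-a-time C loop (assuming
   size holds the number of bytes to copy):

       unsigned long iter = 0;
       goto cmp;
     top:
       ((unsigned char *) x)[iter] = ((const unsigned char *) y)[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;
 */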
2093
2094/* Copy all or part of a value X into registers starting at REGNO.
2095   The number of registers to be filled is NREGS.  */
2096
2097void
2098move_block_to_reg (regno, x, nregs, mode)
2099     int regno;
2100     rtx x;
2101     int nregs;
2102     enum machine_mode mode;
2103{
2104  int i;
2105#ifdef HAVE_load_multiple
2106  rtx pat;
2107  rtx last;
2108#endif
2109
2110  if (nregs == 0)
2111    return;
2112
2113  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2114    x = validize_mem (force_const_mem (mode, x));
2115
2116  /* See if the machine can do this with a load multiple insn.  */
2117#ifdef HAVE_load_multiple
2118  if (HAVE_load_multiple)
2119    {
2120      last = get_last_insn ();
2121      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2122			       GEN_INT (nregs));
2123      if (pat)
2124	{
2125	  emit_insn (pat);
2126	  return;
2127	}
2128      else
2129	delete_insns_since (last);
2130    }
2131#endif
2132
2133  for (i = 0; i < nregs; i++)
2134    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2135		    operand_subword_force (x, i, mode));
2136}
2137
2138/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2139   The number of registers to be filled is NREGS.  SIZE indicates the number
2140   of bytes in the object X.  */
2141
2142void
2143move_block_from_reg (regno, x, nregs, size)
2144     int regno;
2145     rtx x;
2146     int nregs;
2147     int size;
2148{
2149  int i;
2150#ifdef HAVE_store_multiple
2151  rtx pat;
2152  rtx last;
2153#endif
2154  enum machine_mode mode;
2155
2156  if (nregs == 0)
2157    return;
2158
2159  /* If SIZE is that of a mode no bigger than a word, just use that
2160     mode's store operation.  */
2161  if (size <= UNITS_PER_WORD
2162      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2163    {
2164      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2165      return;
2166    }
2167
2168  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2169     to the left before storing to memory.  Note that the previous test
2170     doesn't handle all cases (e.g. SIZE == 3).  */
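  /* Editorial example: with UNITS_PER_WORD == 4 and SIZE == 3, the three
     meaningful bytes sit at the low-order end of the register but must end
     up in the low-addressed (high-order) bytes of the word in memory, so
     the register is shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits
     before the store below.  */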
2171  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2172    {
2173      rtx tem = operand_subword (x, 0, 1, BLKmode);
2174      rtx shift;
2175
2176      if (tem == 0)
2177	abort ();
2178
2179      shift = expand_shift (LSHIFT_EXPR, word_mode,
2180			    gen_rtx_REG (word_mode, regno),
2181			    build_int_2 ((UNITS_PER_WORD - size)
2182					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2183      emit_move_insn (tem, shift);
2184      return;
2185    }
2186
2187  /* See if the machine can do this with a store multiple insn.  */
2188#ifdef HAVE_store_multiple
2189  if (HAVE_store_multiple)
2190    {
2191      last = get_last_insn ();
2192      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2193				GEN_INT (nregs));
2194      if (pat)
2195	{
2196	  emit_insn (pat);
2197	  return;
2198	}
2199      else
2200	delete_insns_since (last);
2201    }
2202#endif
2203
2204  for (i = 0; i < nregs; i++)
2205    {
2206      rtx tem = operand_subword (x, i, 1, BLKmode);
2207
2208      if (tem == 0)
2209	abort ();
2210
2211      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2212    }
2213}
2214
2215/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2216   ORIG, where ORIG is a non-consecutive group of registers represented by
2217   a PARALLEL.  The clone is identical to the original except that the
2218   original set of registers is replaced by a new set of pseudo registers.
2219   The new set has the same modes as the original set.  */
2220
2221rtx
2222gen_group_rtx (orig)
2223     rtx orig;
2224{
2225  int i, length;
2226  rtx *tmps;
2227
2228  if (GET_CODE (orig) != PARALLEL)
2229    abort ();
2230
2231  length = XVECLEN (orig, 0);
2232  tmps = (rtx *) alloca (sizeof (rtx) * length);
2233
2234  /* Skip a NULL entry in the first slot.  */
2235  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2236
2237  if (i)
2238    tmps[0] = 0;
2239
2240  for (; i < length; i++)
2241    {
2242      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2243      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2244
2245      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2246    }
2247
2248  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2249}
2250
2251/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2252   registers represented by a PARALLEL.  SSIZE represents the total size of
2253   block SRC in bytes, or -1 if not known.  */
2254/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2255   the balance will be in what would be the low-order memory addresses, i.e.
2256   left justified for big endian, right justified for little endian.  This
2257   happens to be true for the targets currently using this support.  If this
2258   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2259   would be needed.  */
2260
2261void
2262emit_group_load (dst, orig_src, ssize)
2263     rtx dst, orig_src;
2264     int ssize;
2265{
2266  rtx *tmps, src;
2267  int start, i;
2268
2269  if (GET_CODE (dst) != PARALLEL)
2270    abort ();
2271
2272  /* Check for a NULL entry, used to indicate that the parameter goes
2273     both on the stack and in registers.  */
2274  if (XEXP (XVECEXP (dst, 0, 0), 0))
2275    start = 0;
2276  else
2277    start = 1;
2278
2279  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2280
2281  /* Process the pieces.  */
2282  for (i = start; i < XVECLEN (dst, 0); i++)
2283    {
2284      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2285      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2286      unsigned int bytelen = GET_MODE_SIZE (mode);
2287      int shift = 0;
2288
2289      /* Handle trailing fragments that run over the size of the struct.  */
2290      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2291	{
2292	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2293	  bytelen = ssize - bytepos;
2294	  if (bytelen <= 0)
2295	    abort ();
2296	}
2297
2298      /* If we won't be loading directly from memory, protect the real source
2299	 from strange tricks we might play; but make sure that the source can
2300	 be loaded directly into the destination.  */
2301      src = orig_src;
2302      if (GET_CODE (orig_src) != MEM
2303	  && (!CONSTANT_P (orig_src)
2304	      || (GET_MODE (orig_src) != mode
2305		  && GET_MODE (orig_src) != VOIDmode)))
2306	{
2307	  if (GET_MODE (orig_src) == VOIDmode)
2308	    src = gen_reg_rtx (mode);
2309	  else
2310	    src = gen_reg_rtx (GET_MODE (orig_src));
2311
2312	  emit_move_insn (src, orig_src);
2313	}
2314
2315      /* Optimize the access just a bit.  */
2316      if (GET_CODE (src) == MEM
2317	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2318	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2319	  && bytelen == GET_MODE_SIZE (mode))
2320	{
2321	  tmps[i] = gen_reg_rtx (mode);
2322	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2323	}
2324      else if (GET_CODE (src) == CONCAT)
2325	{
2326	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2327	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2328
2329	  if ((bytepos == 0 && bytelen == slen0)
2330	      || (bytepos != 0 && bytepos + bytelen <= slen))
2331	    {
2332	      /* The following assumes that the concatenated objects all
2333		 have the same size.  In this case, a simple calculation
2334		 can be used to determine the object and the bit field
2335		 to be extracted.  */
2336	      tmps[i] = XEXP (src, bytepos / slen0);
2337	      if (! CONSTANT_P (tmps[i])
2338		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2339		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2340					     (bytepos % slen0) * BITS_PER_UNIT,
2341					     1, NULL_RTX, mode, mode, ssize);
2342	    }
2343	  else if (bytepos == 0)
2344	    {
2345	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2346	      emit_move_insn (mem, src);
2347	      tmps[i] = adjust_address (mem, mode, 0);
2348	    }
2349	  else
2350	    abort ();
2351	}
2352      else if (CONSTANT_P (src)
2353	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2354	tmps[i] = src;
2355      else
2356	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2357				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2358				     mode, mode, ssize);
2359
2360      if (BYTES_BIG_ENDIAN && shift)
2361	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2362		      tmps[i], 0, OPTAB_WIDEN);
2363    }
2364
2365  emit_queue ();
2366
2367  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2368  for (i = start; i < XVECLEN (dst, 0); i++)
2369    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2370}
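/* Editorial example: for a target that returns a 16-byte structure in two
   8-byte registers, DST might look schematically like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   where each element pairs a destination register with the byte offset of
   the piece it receives; emit_group_load extracts each piece from SRC at
   that offset and moves it into the corresponding register.  */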
2371
2372/* Emit code to move a block SRC to block DST, where SRC and DST are
2373   non-consecutive groups of registers, each represented by a PARALLEL.  */
2374
2375void
2376emit_group_move (dst, src)
2377     rtx dst, src;
2378{
2379  int i;
2380
2381  if (GET_CODE (src) != PARALLEL
2382      || GET_CODE (dst) != PARALLEL
2383      || XVECLEN (src, 0) != XVECLEN (dst, 0))
2384    abort ();
2385
2386  /* Skip first entry if NULL.  */
2387  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2388    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2389		    XEXP (XVECEXP (src, 0, i), 0));
2390}
2391
2392/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2393   registers represented by a PARALLEL.  SSIZE represents the total size of
2394   block DST, or -1 if not known.  */
2395
2396void
2397emit_group_store (orig_dst, src, ssize)
2398     rtx orig_dst, src;
2399     int ssize;
2400{
2401  rtx *tmps, dst;
2402  int start, i;
2403
2404  if (GET_CODE (src) != PARALLEL)
2405    abort ();
2406
2407  /* Check for a NULL entry, used to indicate that the parameter goes
2408     both on the stack and in registers.  */
2409  if (XEXP (XVECEXP (src, 0, 0), 0))
2410    start = 0;
2411  else
2412    start = 1;
2413
2414  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2415
2416  /* Copy the (probable) hard regs into pseudos.  */
2417  for (i = start; i < XVECLEN (src, 0); i++)
2418    {
2419      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2420      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2421      emit_move_insn (tmps[i], reg);
2422    }
2423  emit_queue ();
2424
2425  /* If we won't be storing directly into memory, protect the real destination
2426     from strange tricks we might play.  */
2427  dst = orig_dst;
2428  if (GET_CODE (dst) == PARALLEL)
2429    {
2430      rtx temp;
2431
2432      /* We can get a PARALLEL dst if there is a conditional expression in
2433	 a return statement.  In that case, the dst and src are the same,
2434	 so no action is necessary.  */
2435      if (rtx_equal_p (dst, src))
2436	return;
2437
2438      /* It is unclear if we can ever reach here, but we may as well handle
2439	 it.  Allocate a temporary, and split this into a store/load to/from
2440	 the temporary.  */
2441
2442      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2443      emit_group_store (temp, src, ssize);
2444      emit_group_load (dst, temp, ssize);
2445      return;
2446    }
2447  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2448    {
2449      dst = gen_reg_rtx (GET_MODE (orig_dst));
2450      /* Make life a bit easier for combine.  */
2451      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2452    }
2453
2454  /* Process the pieces.  */
2455  for (i = start; i < XVECLEN (src, 0); i++)
2456    {
2457      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2458      enum machine_mode mode = GET_MODE (tmps[i]);
2459      unsigned int bytelen = GET_MODE_SIZE (mode);
2460      rtx dest = dst;
2461
2462      /* Handle trailing fragments that run over the size of the struct.  */
2463      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2464	{
2465	  if (BYTES_BIG_ENDIAN)
2466	    {
2467	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2468	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2469			    tmps[i], 0, OPTAB_WIDEN);
2470	    }
2471	  bytelen = ssize - bytepos;
2472	}
2473
2474      if (GET_CODE (dst) == CONCAT)
2475	{
2476	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2477	    dest = XEXP (dst, 0);
2478	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2479	    {
2480	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2481	      dest = XEXP (dst, 1);
2482	    }
2483	  else if (bytepos == 0 && XVECLEN (src, 0))
2484	    {
2485	      dest = assign_stack_temp (GET_MODE (dest),
2486				        GET_MODE_SIZE (GET_MODE (dest)), 0);
2487	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2488			      tmps[i]);
2489	      dst = dest;
2490	      break;
2491	    }
2492	  else
2493	    abort ();
2494	}
2495
2496      /* Optimize the access just a bit.  */
2497      if (GET_CODE (dest) == MEM
2498	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2499	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2500	  && bytelen == GET_MODE_SIZE (mode))
2501	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2502      else
2503	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2504			 mode, tmps[i], ssize);
2505    }
2506
2507  emit_queue ();
2508
2509  /* Copy from the pseudo into the (probable) hard reg.  */
2510  if (orig_dst != dst)
2511    emit_move_insn (orig_dst, dst);
2512}
2513
2514/* Generate code to copy a BLKmode object of TYPE out of a
2515   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2516   is null, a stack temporary is created.  TGTBLK is returned.
2517
2518   The primary purpose of this routine is to handle functions
2519   that return BLKmode structures in registers.  Some machines
2520   (the PA for example) want to return all small structures
2521   in registers regardless of the structure's alignment.  */
2522
2523rtx
2524copy_blkmode_from_reg (tgtblk, srcreg, type)
2525     rtx tgtblk;
2526     rtx srcreg;
2527     tree type;
2528{
2529  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2530  rtx src = NULL, dst = NULL;
2531  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2532  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2533
2534  if (tgtblk == 0)
2535    {
2536      tgtblk = assign_temp (build_qualified_type (type,
2537						  (TYPE_QUALS (type)
2538						   | TYPE_QUAL_CONST)),
2539			    0, 1, 1);
2540      preserve_temp_slots (tgtblk);
2541    }
2542
2543  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2544     into a new pseudo which is a full word.  */
2545
2546  if (GET_MODE (srcreg) != BLKmode
2547      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2548    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2549
2550  /* Structures whose size is not a multiple of a word are aligned
2551     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2552     machine, this means we must skip the empty high order bytes when
2553     calculating the bit offset.  */
2554  if (BYTES_BIG_ENDIAN
2555      && bytes % UNITS_PER_WORD)
2556    big_endian_correction
2557      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
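  /* Editorial example: with BITS_PER_WORD == 32, UNITS_PER_WORD == 4 and a
     6-byte structure, bytes % UNITS_PER_WORD == 2, so the correction is
     32 - 2 * 8 == 16 bits: extraction starts 16 bits into the word,
     skipping the two empty high-order bytes.  */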
2558
2559  /* Copy the structure BITSIZE bits at a time.
2560
2561     We could probably emit more efficient code for machines which do not use
2562     strict alignment, but it doesn't seem worth the effort at the current
2563     time.  */
2564  for (bitpos = 0, xbitpos = big_endian_correction;
2565       bitpos < bytes * BITS_PER_UNIT;
2566       bitpos += bitsize, xbitpos += bitsize)
2567    {
2568      /* We need a new source operand each time xbitpos is on a
2569	 word boundary and when xbitpos == big_endian_correction
2570	 (the first time through).  */
2571      if (xbitpos % BITS_PER_WORD == 0
2572	  || xbitpos == big_endian_correction)
2573	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2574				     GET_MODE (srcreg));
2575
2576      /* We need a new destination operand each time bitpos is on
2577	 a word boundary.  */
2578      if (bitpos % BITS_PER_WORD == 0)
2579	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2580
2581      /* Use xbitpos for the source extraction (right justified) and
2582	 bitpos for the destination store (left justified).  */
2583      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2584		       extract_bit_field (src, bitsize,
2585					  xbitpos % BITS_PER_WORD, 1,
2586					  NULL_RTX, word_mode, word_mode,
2587					  BITS_PER_WORD),
2588		       BITS_PER_WORD);
2589    }
2590
2591  return tgtblk;
2592}
2593
2594/* Add a USE expression for REG to the (possibly empty) list pointed
2595   to by CALL_FUSAGE.  REG must denote a hard register.  */
2596
2597void
2598use_reg (call_fusage, reg)
2599     rtx *call_fusage, reg;
2600{
2601  if (GET_CODE (reg) != REG
2602      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2603    abort ();
2604
2605  *call_fusage
2606    = gen_rtx_EXPR_LIST (VOIDmode,
2607			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2608}
2609
2610/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2611   starting at REGNO.  All of these registers must be hard registers.  */
2612
2613void
2614use_regs (call_fusage, regno, nregs)
2615     rtx *call_fusage;
2616     int regno;
2617     int nregs;
2618{
2619  int i;
2620
2621  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2622    abort ();
2623
2624  for (i = 0; i < nregs; i++)
2625    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2626}
2627
2628/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2629   PARALLEL REGS.  This is for calls that pass values in multiple
2630   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2631
2632void
2633use_group_regs (call_fusage, regs)
2634     rtx *call_fusage;
2635     rtx regs;
2636{
2637  int i;
2638
2639  for (i = 0; i < XVECLEN (regs, 0); i++)
2640    {
2641      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2642
2643      /* A NULL entry means the parameter goes both on the stack and in
2644	 registers.  This can also be a MEM for targets that pass values
2645	 partially on the stack and partially in registers.  */
2646      if (reg != 0 && GET_CODE (reg) == REG)
2647	use_reg (call_fusage, reg);
2648    }
2649}
2650
2651
2652/* Determine whether the LEN bytes generated by CONSTFUN can be
2653   stored to memory using several move instructions.  CONSTFUNDATA is
2654   a pointer which will be passed as argument in every CONSTFUN call.
2655   ALIGN is the maximum alignment we can assume.  Return nonzero if a
2656   call to store_by_pieces should succeed.  */
2657
2658int
2659can_store_by_pieces (len, constfun, constfundata, align)
2660     unsigned HOST_WIDE_INT len;
2661     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2662     PTR constfundata;
2663     unsigned int align;
2664{
2665  unsigned HOST_WIDE_INT max_size, l;
2666  HOST_WIDE_INT offset = 0;
2667  enum machine_mode mode, tmode;
2668  enum insn_code icode;
2669  int reverse;
2670  rtx cst;
2671
2672  if (len == 0)
2673    return 1;
2674
2675  if (! MOVE_BY_PIECES_P (len, align))
2676    return 0;
2677
2678  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2679      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2680    align = MOVE_MAX * BITS_PER_UNIT;
2681
2682  /* We would first store what we can in the largest integer mode, then go to
2683     successively smaller modes.  */
2684
2685  for (reverse = 0;
2686       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2687       reverse++)
2688    {
2689      l = len;
2690      mode = VOIDmode;
2691      max_size = STORE_MAX_PIECES + 1;
2692      while (max_size > 1)
2693	{
2694	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2695	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2696	    if (GET_MODE_SIZE (tmode) < max_size)
2697	      mode = tmode;
2698
2699	  if (mode == VOIDmode)
2700	    break;
2701
2702	  icode = mov_optab->handlers[(int) mode].insn_code;
2703	  if (icode != CODE_FOR_nothing
2704	      && align >= GET_MODE_ALIGNMENT (mode))
2705	    {
2706	      unsigned int size = GET_MODE_SIZE (mode);
2707
2708	      while (l >= size)
2709		{
2710		  if (reverse)
2711		    offset -= size;
2712
2713		  cst = (*constfun) (constfundata, offset, mode);
2714		  if (!LEGITIMATE_CONSTANT_P (cst))
2715		    return 0;
2716
2717		  if (!reverse)
2718		    offset += size;
2719
2720		  l -= size;
2721		}
2722	    }
2723
2724	  max_size = GET_MODE_SIZE (mode);
2725	}
2726
2727      /* The code above should have handled everything.  */
2728      if (l != 0)
2729	abort ();
2730    }
2731
2732  return 1;
2733}
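/* Editorial example: with sufficient alignment and a widest store piece of
   8 bytes, a length of 11 decomposes into an 8-byte piece at offset 0, a
   2-byte piece at offset 8 and a 1-byte piece at offset 10; CONSTFUN must
   yield a legitimate constant for each of those (mode, offset) pairs for
   can_store_by_pieces to return nonzero.  */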
2734
2735/* Generate several move instructions to store LEN bytes generated by
2736   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2737   pointer which will be passed as argument in every CONSTFUN call.
2738   ALIGN is the maximum alignment we can assume.  */
2739
2740void
2741store_by_pieces (to, len, constfun, constfundata, align)
2742     rtx to;
2743     unsigned HOST_WIDE_INT len;
2744     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2745     PTR constfundata;
2746     unsigned int align;
2747{
2748  struct store_by_pieces data;
2749
2750  if (len == 0)
2751    return;
2752
2753  if (! MOVE_BY_PIECES_P (len, align))
2754    abort ();
2755  to = protect_from_queue (to, 1);
2756  data.constfun = constfun;
2757  data.constfundata = constfundata;
2758  data.len = len;
2759  data.to = to;
2760  store_by_pieces_1 (&data, align);
2761}
2762
2763/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2764   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2765   before calling.  ALIGN is the maximum alignment we can assume.  */
2766
2767static void
2768clear_by_pieces (to, len, align)
2769     rtx to;
2770     unsigned HOST_WIDE_INT len;
2771     unsigned int align;
2772{
2773  struct store_by_pieces data;
2774
2775  if (len == 0)
2776    return;
2777
2778  data.constfun = clear_by_pieces_1;
2779  data.constfundata = NULL;
2780  data.len = len;
2781  data.to = to;
2782  store_by_pieces_1 (&data, align);
2783}
2784
2785/* Callback routine for clear_by_pieces.
2786   Return const0_rtx unconditionally.  */
2787
2788static rtx
2789clear_by_pieces_1 (data, offset, mode)
2790     PTR data ATTRIBUTE_UNUSED;
2791     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2792     enum machine_mode mode ATTRIBUTE_UNUSED;
2793{
2794  return const0_rtx;
2795}
2796
2797/* Subroutine of clear_by_pieces and store_by_pieces.
2798   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2799   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2800   before calling.  ALIGN is the maximum alignment we can assume.  */
2801
2802static void
2803store_by_pieces_1 (data, align)
2804     struct store_by_pieces *data;
2805     unsigned int align;
2806{
2807  rtx to_addr = XEXP (data->to, 0);
2808  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2809  enum machine_mode mode = VOIDmode, tmode;
2810  enum insn_code icode;
2811
2812  data->offset = 0;
2813  data->to_addr = to_addr;
2814  data->autinc_to
2815    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2816       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2817
2818  data->explicit_inc_to = 0;
2819  data->reverse
2820    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2821  if (data->reverse)
2822    data->offset = data->len;
2823
2824  /* If storing requires more than two move insns,
2825     copy addresses to registers (to make displacements shorter)
2826     and use post-increment if available.  */
2827  if (!data->autinc_to
2828      && move_by_pieces_ninsns (data->len, align) > 2)
2829    {
2830      /* Determine the main mode we'll be using.  */
2831      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2832	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2833	if (GET_MODE_SIZE (tmode) < max_size)
2834	  mode = tmode;
2835
2836      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2837	{
2838	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2839	  data->autinc_to = 1;
2840	  data->explicit_inc_to = -1;
2841	}
2842
2843      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2844	  && ! data->autinc_to)
2845	{
2846	  data->to_addr = copy_addr_to_reg (to_addr);
2847	  data->autinc_to = 1;
2848	  data->explicit_inc_to = 1;
2849	}
2850
2851      if (!data->autinc_to && CONSTANT_P (to_addr))
2852	data->to_addr = copy_addr_to_reg (to_addr);
2853    }
2854
2855  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2856      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2857    align = MOVE_MAX * BITS_PER_UNIT;
2858
2859  /* First store what we can in the largest integer mode, then go to
2860     successively smaller modes.  */
2861
2862  while (max_size > 1)
2863    {
2864      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2865	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2866	if (GET_MODE_SIZE (tmode) < max_size)
2867	  mode = tmode;
2868
2869      if (mode == VOIDmode)
2870	break;
2871
2872      icode = mov_optab->handlers[(int) mode].insn_code;
2873      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2874	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2875
2876      max_size = GET_MODE_SIZE (mode);
2877    }
2878
2879  /* The code above should have handled everything.  */
2880  if (data->len != 0)
2881    abort ();
2882}
2883
2884/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2885   with move instructions for mode MODE.  GENFUN is the gen_... function
2886   to make a move insn for that mode.  DATA has all the other info.  */
2887
2888static void
2889store_by_pieces_2 (genfun, mode, data)
2890     rtx (*genfun) PARAMS ((rtx, ...));
2891     enum machine_mode mode;
2892     struct store_by_pieces *data;
2893{
2894  unsigned int size = GET_MODE_SIZE (mode);
2895  rtx to1, cst;
2896
2897  while (data->len >= size)
2898    {
2899      if (data->reverse)
2900	data->offset -= size;
2901
2902      if (data->autinc_to)
2903	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2904					 data->offset);
2905      else
2906	to1 = adjust_address (data->to, mode, data->offset);
2907
2908      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2909	emit_insn (gen_add2_insn (data->to_addr,
2910				  GEN_INT (-(HOST_WIDE_INT) size)));
2911
2912      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2913      emit_insn ((*genfun) (to1, cst));
2914
2915      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2916	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2917
2918      if (! data->reverse)
2919	data->offset += size;
2920
2921      data->len -= size;
2922    }
2923}
2924
2925/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2926   its length in bytes.  */
2927
2928rtx
2929clear_storage (object, size)
2930     rtx object;
2931     rtx size;
2932{
2933  rtx retval = 0;
2934  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2935			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2936
2937  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2938     just move a zero.  Otherwise, do this a piece at a time.  */
2939  if (GET_MODE (object) != BLKmode
2940      && GET_CODE (size) == CONST_INT
2941      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2942    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2943  else
2944    {
2945      object = protect_from_queue (object, 1);
2946      size = protect_from_queue (size, 0);
2947
2948      if (GET_CODE (size) == CONST_INT && INTVAL (size) == 0)
2949	;
2950      else if (GET_CODE (size) == CONST_INT
2951	  && CLEAR_BY_PIECES_P (INTVAL (size), align))
2952	clear_by_pieces (object, INTVAL (size), align);
2953      else if (clear_storage_via_clrstr (object, size, align))
2954	;
2955      else
2956	retval = clear_storage_via_libcall (object, size);
2957    }
2958
2959  return retval;
2960}
2961
2962/* A subroutine of clear_storage.  Expand a clrstr pattern;
2963   return true if successful.  */
2964
2965static bool
2966clear_storage_via_clrstr (object, size, align)
2967     rtx object, size;
2968     unsigned int align;
2969{
2970  /* Try the most limited insn first, because there's no point
2971     including more than one in the machine description unless
2972     the more limited one has some advantage.  */
2973
2974  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2975  enum machine_mode mode;
2976
2977  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2978       mode = GET_MODE_WIDER_MODE (mode))
2979    {
2980      enum insn_code code = clrstr_optab[(int) mode];
2981      insn_operand_predicate_fn pred;
2982
2983      if (code != CODE_FOR_nothing
2984	  /* We don't need MODE to be narrower than
2985	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2986	     the mode mask, as it is returned by the macro, it will
2987	     definitely be less than the actual mode mask.  */
2988	  && ((GET_CODE (size) == CONST_INT
2989	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2990		   <= (GET_MODE_MASK (mode) >> 1)))
2991	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2992	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2993	      || (*pred) (object, BLKmode))
2994	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2995	      || (*pred) (opalign, VOIDmode)))
2996	{
2997	  rtx op1;
2998	  rtx last = get_last_insn ();
2999	  rtx pat;
3000
3001	  op1 = convert_to_mode (mode, size, 1);
3002	  pred = insn_data[(int) code].operand[1].predicate;
3003	  if (pred != 0 && ! (*pred) (op1, mode))
3004	    op1 = copy_to_mode_reg (mode, op1);
3005
3006	  pat = GEN_FCN ((int) code) (object, op1, opalign);
3007	  if (pat)
3008	    {
3009	      emit_insn (pat);
3010	      return true;
3011	    }
3012	  else
3013	    delete_insns_since (last);
3014	}
3015    }
3016
3017  return false;
3018}
3019
3020/* A subroutine of clear_storage.  Expand a call to memset or bzero.
3021   Return the return value of memset, 0 otherwise.  */
3022
3023static rtx
3024clear_storage_via_libcall (object, size)
3025     rtx object, size;
3026{
3027  tree call_expr, arg_list, fn, object_tree, size_tree;
3028  enum machine_mode size_mode;
3029  rtx retval;
3030
3031  /* OBJECT or SIZE may have been passed through protect_from_queue.
3032
3033     It is unsafe to save the value generated by protect_from_queue
3034     and reuse it later.  Consider what happens if emit_queue is
3035     called before the return value from protect_from_queue is used.
3036
3037     Expansion of the CALL_EXPR below will call emit_queue before
3038     we are finished emitting RTL for argument setup.  So if we are
3039     not careful we could get the wrong value for an argument.
3040
3041     To avoid this problem we go ahead and emit code to copy OBJECT
3042     and SIZE into new pseudos.  We can then place those new pseudos
3043     into an RTL_EXPR and use them later, even after a call to
3044     emit_queue.
3045
3046     Note this is not strictly needed for library calls since they
3047     do not call emit_queue before loading their arguments.  However,
3048     we may need to have library calls call emit_queue in the future
3049     since failing to do so could cause problems for targets which
3050     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
3051
3052  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3053
3054  if (TARGET_MEM_FUNCTIONS)
3055    size_mode = TYPE_MODE (sizetype);
3056  else
3057    size_mode = TYPE_MODE (unsigned_type_node);
3058  size = convert_to_mode (size_mode, size, 1);
3059  size = copy_to_mode_reg (size_mode, size);
3060
3061  /* It is incorrect to use the libcall calling conventions to call
3062     memset in this context.  This could be a user call to memset and
3063     the user may wish to examine the return value from memset.  For
3064     targets where libcalls and normal calls have different conventions
3065     for returning pointers, we could end up generating incorrect code.
3066
3067     For convenience, we generate the call to bzero this way as well.  */
3068
3069  object_tree = make_tree (ptr_type_node, object);
3070  if (TARGET_MEM_FUNCTIONS)
3071    size_tree = make_tree (sizetype, size);
3072  else
3073    size_tree = make_tree (unsigned_type_node, size);
3074
3075  fn = clear_storage_libcall_fn (true);
3076  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3077  if (TARGET_MEM_FUNCTIONS)
3078    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3079  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3080
3081  /* Now we have to build up the CALL_EXPR itself.  */
3082  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3083  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3084		     call_expr, arg_list, NULL_TREE);
3085  TREE_SIDE_EFFECTS (call_expr) = 1;
3086
3087  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3088
3089  /* If we are initializing a readonly value, show the above call
3090     clobbered it.  Otherwise, a load from it may erroneously be
3091     hoisted from a loop.  */
3092  if (RTX_UNCHANGING_P (object))
3093    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3094
3095  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3096}
3097
3098/* A subroutine of clear_storage_via_libcall.  Create the tree node
3099   for the function we use for block clears.  The first time FOR_CALL
3100   is true, we call assemble_external.  */
3101
3102static GTY(()) tree block_clear_fn;
3103
3104static tree
3105clear_storage_libcall_fn (for_call)
3106     int for_call;
3107{
3108  static bool emitted_extern;
3109  tree fn = block_clear_fn, args;
3110
3111  if (!fn)
3112    {
3113      if (TARGET_MEM_FUNCTIONS)
3114	{
3115	  fn = get_identifier ("memset");
3116	  args = build_function_type_list (ptr_type_node, ptr_type_node,
3117					   integer_type_node, sizetype,
3118					   NULL_TREE);
3119	}
3120      else
3121	{
3122	  fn = get_identifier ("bzero");
3123	  args = build_function_type_list (void_type_node, ptr_type_node,
3124					   unsigned_type_node, NULL_TREE);
3125	}
3126
3127      fn = build_decl (FUNCTION_DECL, fn, args);
3128      DECL_EXTERNAL (fn) = 1;
3129      TREE_PUBLIC (fn) = 1;
3130      DECL_ARTIFICIAL (fn) = 1;
3131      TREE_NOTHROW (fn) = 1;
3132
3133      block_clear_fn = fn;
3134    }
3135
3136  if (for_call && !emitted_extern)
3137    {
3138      emitted_extern = true;
3139      make_decl_rtl (fn, NULL);
3140      assemble_external (fn);
3141    }
3142
3143  return fn;
3144}
3145
3146/* Generate code to copy Y into X.
3147   Both Y and X must have the same mode, except that
3148   Y can be a constant with VOIDmode.
3149   This mode cannot be BLKmode; use emit_block_move for that.
3150
3151   Return the last instruction emitted.  */
3152
3153rtx
3154emit_move_insn (x, y)
3155     rtx x, y;
3156{
3157  enum machine_mode mode = GET_MODE (x);
3158  rtx y_cst = NULL_RTX;
3159  rtx last_insn;
3160
3161  x = protect_from_queue (x, 1);
3162  y = protect_from_queue (y, 0);
3163
3164  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3165    abort ();
3166
3167  /* Never force constant_p_rtx to memory.  */
3168  if (GET_CODE (y) == CONSTANT_P_RTX)
3169    ;
3170  else if (CONSTANT_P (y))
3171    {
3172      if (optimize
3173	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3174	  && (last_insn = compress_float_constant (x, y)))
3175	return last_insn;
3176
3177      if (!LEGITIMATE_CONSTANT_P (y))
3178	{
3179	  y_cst = y;
3180	  y = force_const_mem (mode, y);
3181
3182	  /* If the target's cannot_force_const_mem prevented the spill,
3183	     assume that the target's move expanders will also take care
3184	     of the non-legitimate constant.  */
3185	  if (!y)
3186	    y = y_cst;
3187	}
3188    }
3189
3190  /* If X or Y are memory references, verify that their addresses are valid
3191     for the machine.  */
3192  if (GET_CODE (x) == MEM
3193      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3194	   && ! push_operand (x, GET_MODE (x)))
3195	  || (flag_force_addr
3196	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3197    x = validize_mem (x);
3198
3199  if (GET_CODE (y) == MEM
3200      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3201	  || (flag_force_addr
3202	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3203    y = validize_mem (y);
3204
3205  if (mode == BLKmode)
3206    abort ();
3207
3208  last_insn = emit_move_insn_1 (x, y);
3209
3210  if (y_cst && GET_CODE (x) == REG)
3211    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3212
3213  return last_insn;
3214}
3215
3216/* Low level part of emit_move_insn.
3217   Called just like emit_move_insn, but assumes X and Y
3218   are basically valid.  */
3219
3220rtx
3221emit_move_insn_1 (x, y)
3222     rtx x, y;
3223{
3224  enum machine_mode mode = GET_MODE (x);
3225  enum machine_mode submode;
3226  enum mode_class class = GET_MODE_CLASS (mode);
3227
3228  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3229    abort ();
3230
3231  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3232    return
3233      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3234
3235  /* Expand complex moves by moving real part and imag part, if possible.  */
3236  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3237	   && BLKmode != (submode = GET_MODE_INNER (mode))
3238	   && (mov_optab->handlers[(int) submode].insn_code
3239	       != CODE_FOR_nothing))
3240    {
3241      /* Don't split destination if it is a stack push.  */
3242      int stack = push_operand (x, GET_MODE (x));
3243
3244#ifdef PUSH_ROUNDING
3245      /* In case we output to the stack, but the size is smaller than what
3246	 the machine can push exactly, we need to use move instructions.  */
3247      if (stack
3248	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3249	      != GET_MODE_SIZE (submode)))
3250	{
3251	  rtx temp;
3252	  HOST_WIDE_INT offset1, offset2;
3253
3254	  /* Do not use anti_adjust_stack, since we don't want to update
3255	     stack_pointer_delta.  */
3256	  temp = expand_binop (Pmode,
3257#ifdef STACK_GROWS_DOWNWARD
3258			       sub_optab,
3259#else
3260			       add_optab,
3261#endif
3262			       stack_pointer_rtx,
3263			       GEN_INT
3264				 (PUSH_ROUNDING
3265				  (GET_MODE_SIZE (GET_MODE (x)))),
3266			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3267
3268	  if (temp != stack_pointer_rtx)
3269	    emit_move_insn (stack_pointer_rtx, temp);
3270
3271#ifdef STACK_GROWS_DOWNWARD
3272	  offset1 = 0;
3273	  offset2 = GET_MODE_SIZE (submode);
3274#else
3275	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3276	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3277		     + GET_MODE_SIZE (submode));
3278#endif
3279
3280	  emit_move_insn (change_address (x, submode,
3281					  gen_rtx_PLUS (Pmode,
3282						        stack_pointer_rtx,
3283							GEN_INT (offset1))),
3284			  gen_realpart (submode, y));
3285	  emit_move_insn (change_address (x, submode,
3286					  gen_rtx_PLUS (Pmode,
3287						        stack_pointer_rtx,
3288							GEN_INT (offset2))),
3289			  gen_imagpart (submode, y));
3290	}
3291      else
3292#endif
3293      /* If this is a stack push, push the highpart first, so it
3294	 will be in the argument order.
3295
3296	 In that case, change_address is used only to convert
3297	 the mode, not to change the address.  */
3298      if (stack)
3299	{
3300	  /* Note that the real part always precedes the imag part in memory
3301	     regardless of machine's endianness.  */
3302#ifdef STACK_GROWS_DOWNWARD
3303	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3304		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3305		      gen_imagpart (submode, y)));
3306	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3307		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3308		      gen_realpart (submode, y)));
3309#else
3310	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3311		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3312		      gen_realpart (submode, y)));
3313	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3314		     (gen_rtx_MEM (submode, XEXP (x, 0)),
3315		      gen_imagpart (submode, y)));
3316#endif
3317	}
3318      else
3319	{
3320	  rtx realpart_x, realpart_y;
3321	  rtx imagpart_x, imagpart_y;
3322
3323	  /* If this is a complex value with each part being smaller than a
3324	     word, the usual calling sequence will likely pack the pieces into
3325	     a single register.  Unfortunately, SUBREG of hard registers only
3326	     deals in terms of words, so we have a problem converting input
3327	     arguments to the CONCAT of two registers that is used elsewhere
3328	     for complex values.  If this is before reload, we can copy it into
3329	     memory and reload.  FIXME, we should see about using extract and
3330	     insert on integer registers, but complex short and complex char
3331	     variables should be rarely used.  */
3332	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3333	      && (reload_in_progress | reload_completed) == 0)
3334	    {
3335	      int packed_dest_p
3336		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3337	      int packed_src_p
3338		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3339
3340	      if (packed_dest_p || packed_src_p)
3341		{
3342		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3343					       ? MODE_FLOAT : MODE_INT);
3344
3345		  enum machine_mode reg_mode
3346		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3347
3348		  if (reg_mode != BLKmode)
3349		    {
3350		      rtx mem = assign_stack_temp (reg_mode,
3351						   GET_MODE_SIZE (mode), 0);
3352		      rtx cmem = adjust_address (mem, mode, 0);
3353
3354		      cfun->cannot_inline
3355			= N_("function using short complex types cannot be inline");
3356
3357		      if (packed_dest_p)
3358			{
3359			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3360
3361			  emit_move_insn_1 (cmem, y);
3362			  return emit_move_insn_1 (sreg, mem);
3363			}
3364		      else
3365			{
3366			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3367
3368			  emit_move_insn_1 (mem, sreg);
3369			  return emit_move_insn_1 (x, cmem);
3370			}
3371		    }
3372		}
3373	    }
3374
3375	  realpart_x = gen_realpart (submode, x);
3376	  realpart_y = gen_realpart (submode, y);
3377	  imagpart_x = gen_imagpart (submode, x);
3378	  imagpart_y = gen_imagpart (submode, y);
3379
3380	  /* Show the output dies here.  This is necessary for SUBREGs
3381	     of pseudos since we cannot track their lifetimes correctly;
3382	     hard regs shouldn't appear here except as return values.
3383	     We never want to emit such a clobber after reload.  */
3384	  if (x != y
3385	      && ! (reload_in_progress || reload_completed)
3386	      && (GET_CODE (realpart_x) == SUBREG
3387		  || GET_CODE (imagpart_x) == SUBREG))
3388	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3389
3390	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3391		     (realpart_x, realpart_y));
3392	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3393		     (imagpart_x, imagpart_y));
3394	}
3395
3396      return get_last_insn ();
3397    }
3398
3399  /* This will handle any multi-word or full-word mode that lacks a move_insn
3400     pattern.  However, you will get better code if you define such patterns,
3401     even if they must turn into multiple assembler instructions.  */
3402  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3403    {
3404      rtx last_insn = 0;
3405      rtx seq, inner;
3406      int need_clobber;
3407      int i;
3408
3409#ifdef PUSH_ROUNDING
3410
3411      /* If X is a push on the stack, do the push now and replace
3412	 X with a reference to the stack pointer.  */
3413      if (push_operand (x, GET_MODE (x)))
3414	{
3415	  rtx temp;
3416	  enum rtx_code code;
3417
3418	  /* Do not use anti_adjust_stack, since we don't want to update
3419	     stack_pointer_delta.  */
3420	  temp = expand_binop (Pmode,
3421#ifdef STACK_GROWS_DOWNWARD
3422			       sub_optab,
3423#else
3424			       add_optab,
3425#endif
3426			       stack_pointer_rtx,
3427			       GEN_INT
3428				 (PUSH_ROUNDING
3429				  (GET_MODE_SIZE (GET_MODE (x)))),
3430			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3431
3432	  if (temp != stack_pointer_rtx)
3433	    emit_move_insn (stack_pointer_rtx, temp);
3434
3435	  code = GET_CODE (XEXP (x, 0));
3436
3437	  /* Just hope that small offsets off SP are OK.  */
3438	  if (code == POST_INC)
3439	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3440				GEN_INT (-((HOST_WIDE_INT)
3441					   GET_MODE_SIZE (GET_MODE (x)))));
3442	  else if (code == POST_DEC)
3443	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3444				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3445	  else
3446	    temp = stack_pointer_rtx;
3447
3448	  x = change_address (x, VOIDmode, temp);
3449	}
3450#endif
3451
3452      /* If we are in reload, see if either operand is a MEM whose address
3453	 is scheduled for replacement.  */
3454      if (reload_in_progress && GET_CODE (x) == MEM
3455	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3456	x = replace_equiv_address_nv (x, inner);
3457      if (reload_in_progress && GET_CODE (y) == MEM
3458	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3459	y = replace_equiv_address_nv (y, inner);
3460
3461      start_sequence ();
3462
3463      need_clobber = 0;
3464      for (i = 0;
3465	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3466	   i++)
3467	{
3468	  rtx xpart = operand_subword (x, i, 1, mode);
3469	  rtx ypart = operand_subword (y, i, 1, mode);
3470
3471	  /* If we can't get a part of Y, put Y into memory if it is a
3472	     constant.  Otherwise, force it into a register.  If we still
3473	     can't get a part of Y, abort.  */
3474	  if (ypart == 0 && CONSTANT_P (y))
3475	    {
3476	      y = force_const_mem (mode, y);
3477	      ypart = operand_subword (y, i, 1, mode);
3478	    }
3479	  else if (ypart == 0)
3480	    ypart = operand_subword_force (y, i, mode);
3481
3482	  if (xpart == 0 || ypart == 0)
3483	    abort ();
3484
3485	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3486
3487	  last_insn = emit_move_insn (xpart, ypart);
3488	}
3489
3490      seq = get_insns ();
3491      end_sequence ();
3492
3493      /* Show the output dies here.  This is necessary for SUBREGs
3494	 of pseudos since we cannot track their lifetimes correctly;
3495	 hard regs shouldn't appear here except as return values.
3496	 We never want to emit such a clobber after reload.  */
3497      if (x != y
3498	  && ! (reload_in_progress || reload_completed)
3499	  && need_clobber != 0)
3500	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3501
3502      emit_insn (seq);
3503
3504      return last_insn;
3505    }
3506  else
3507    abort ();
3508}
3509
3510/* If Y is representable exactly in a narrower mode, and the target can
3511   perform the extension directly from constant or memory, then emit the
3512   move as an extension.  */
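/* For instance, a DFmode constant such as 1.0 that truncates exactly to
   SFmode can be emitted as an SFmode constant followed by the target's
   float-extend operation, which is usually cheaper than materializing the
   full double-precision bits; the loop below tries each narrower mode the
   target can extend from.  */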
3513
3514static rtx
3515compress_float_constant (x, y)
3516     rtx x, y;
3517{
3518  enum machine_mode dstmode = GET_MODE (x);
3519  enum machine_mode orig_srcmode = GET_MODE (y);
3520  enum machine_mode srcmode;
3521  REAL_VALUE_TYPE r;
3522
3523  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3524
3525  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3526       srcmode != orig_srcmode;
3527       srcmode = GET_MODE_WIDER_MODE (srcmode))
3528    {
3529      enum insn_code ic;
3530      rtx trunc_y, last_insn;
3531
3532      /* Skip if the target can't extend this way.  */
3533      ic = can_extend_p (dstmode, srcmode, 0);
3534      if (ic == CODE_FOR_nothing)
3535	continue;
3536
3537      /* Skip if the narrowed value isn't exact.  */
3538      if (! exact_real_truncate (srcmode, &r))
3539	continue;
3540
3541      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3542
3543      if (LEGITIMATE_CONSTANT_P (trunc_y))
3544	{
3545	  /* Skip if the target needs extra instructions to perform
3546	     the extension.  */
3547	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3548	    continue;
3549	}
3550      else if (float_extend_from_mem[dstmode][srcmode])
3551	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3552      else
3553	continue;
3554
3555      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3556      last_insn = get_last_insn ();
3557
3558      if (GET_CODE (x) == REG)
3559	REG_NOTES (last_insn)
3560	  = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3561
3562      return last_insn;
3563    }
3564
3565  return NULL_RTX;
3566}
3567
3568/* Pushing data onto the stack.  */
3569
3570/* Push a block of length SIZE (perhaps variable)
3571   and return an rtx to address the beginning of the block.
3572   Note that it is not possible for the value returned to be a QUEUED.
3573   The value may be virtual_outgoing_args_rtx.
3574
3575   EXTRA is the number of bytes of padding to push in addition to SIZE.
3576   BELOW nonzero means this padding comes at low addresses;
3577   otherwise, the padding comes at high addresses.  */
3578
3579rtx
3580push_block (size, extra, below)
3581     rtx size;
3582     int extra, below;
3583{
3584  rtx temp;
3585
3586  size = convert_modes (Pmode, ptr_mode, size, 1);
3587  if (CONSTANT_P (size))
3588    anti_adjust_stack (plus_constant (size, extra));
3589  else if (GET_CODE (size) == REG && extra == 0)
3590    anti_adjust_stack (size);
3591  else
3592    {
3593      temp = copy_to_mode_reg (Pmode, size);
3594      if (extra != 0)
3595	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3596			     temp, 0, OPTAB_LIB_WIDEN);
3597      anti_adjust_stack (temp);
3598    }
3599
3600#ifndef STACK_GROWS_DOWNWARD
3601  if (0)
3602#else
3603  if (1)
3604#endif
3605    {
3606      temp = virtual_outgoing_args_rtx;
3607      if (extra != 0 && below)
3608	temp = plus_constant (temp, extra);
3609    }
3610  else
3611    {
3612      if (GET_CODE (size) == CONST_INT)
3613	temp = plus_constant (virtual_outgoing_args_rtx,
3614			      -INTVAL (size) - (below ? 0 : extra));
3615      else if (extra != 0 && !below)
3616	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3617			     negate_rtx (Pmode, plus_constant (size, extra)));
3618      else
3619	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3620			     negate_rtx (Pmode, size));
3621    }
3622
3623  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3624}
3625
3626#ifdef PUSH_ROUNDING
3627
3628/* Emit single push insn.  */
3629
3630static void
3631emit_single_push_insn (mode, x, type)
3632     rtx x;
3633     enum machine_mode mode;
3634     tree type;
3635{
3636  rtx dest_addr;
3637  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3638  rtx dest;
3639  enum insn_code icode;
3640  insn_operand_predicate_fn pred;
3641
3642  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3643  /* If there is a push pattern, use it.  Otherwise try the old way of
3644     throwing a MEM representing the push operation at the move expander.  */
3645  icode = push_optab->handlers[(int) mode].insn_code;
3646  if (icode != CODE_FOR_nothing)
3647    {
3648      if (((pred = insn_data[(int) icode].operand[0].predicate)
3649	   && !((*pred) (x, mode))))
3650	x = force_reg (mode, x);
3651      emit_insn (GEN_FCN (icode) (x));
3652      return;
3653    }
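  /* No push pattern: build a MEM whose address encodes the push.  When
     PUSH_ROUNDING leaves the mode's size unchanged, the plain
     STACK_PUSH_CODE form (e.g. (pre_dec sp)) suffices; otherwise, say for
     a 1-byte push rounded up to 4 bytes, the full adjustment must be
     spelled out with a PRE_MODIFY address.  */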
3654  if (GET_MODE_SIZE (mode) == rounded_size)
3655    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3656  else
3657    {
3658#ifdef STACK_GROWS_DOWNWARD
3659      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3660				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3661#else
3662      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3663				GEN_INT (rounded_size));
3664#endif
3665      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3666    }
3667
3668  dest = gen_rtx_MEM (mode, dest_addr);
3669
3670  if (type != 0)
3671    {
3672      set_mem_attributes (dest, type, 1);
3673
3674      if (flag_optimize_sibling_calls)
3675	/* Function incoming arguments may overlap with sibling call
3676	   outgoing arguments and we cannot allow reordering of reads
3677	   from function arguments with stores to outgoing arguments
3678	   of sibling calls.  */
3679	set_mem_alias_set (dest, 0);
3680    }
3681  emit_move_insn (dest, x);
3682}
3683#endif
3684
3685/* Generate code to push X onto the stack, assuming it has mode MODE and
3686   type TYPE.
3687   MODE is redundant except when X is a CONST_INT (since they don't
3688   carry mode info).
3689   SIZE is an rtx for the size of data to be copied (in bytes),
3690   needed only if X is BLKmode.
3691
3692   ALIGN (in bits) is maximum alignment we can assume.
3693
3694   If PARTIAL and REG are both nonzero, then copy that many of the first
3695   words of X into registers starting with REG, and push the rest of X.
3696   The amount of space pushed is decreased by PARTIAL words,
3697   rounded *down* to a multiple of PARM_BOUNDARY.
3698   REG must be a hard register in this case.
3699   If REG is zero but PARTIAL is not, take all other actions for an
3700   argument partially in registers, but do not actually load any
3701   registers.
3702
3703   EXTRA is the amount in bytes of extra space to leave next to this arg.
3704   This is ignored if an argument block has already been allocated.
3705
3706   On a machine that lacks real push insns, ARGS_ADDR is the address of
3707   the bottom of the argument block for this call.  We use indexing off there
3708   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3709   argument block has not been preallocated.
3710
3711   ARGS_SO_FAR is the size of args previously pushed for this call.
3712
3713   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3714   for arguments passed in registers.  If nonzero, it will be the number
3715   of bytes required.  */
3716
3717void
3718emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3719		args_addr, args_so_far, reg_parm_stack_space,
3720		alignment_pad)
3721     rtx x;
3722     enum machine_mode mode;
3723     tree type;
3724     rtx size;
3725     unsigned int align;
3726     int partial;
3727     rtx reg;
3728     int extra;
3729     rtx args_addr;
3730     rtx args_so_far;
3731     int reg_parm_stack_space;
3732     rtx alignment_pad;
3733{
3734  rtx xinner;
3735  enum direction stack_direction
3736#ifdef STACK_GROWS_DOWNWARD
3737    = downward;
3738#else
3739    = upward;
3740#endif
3741
3742  /* Decide where to pad the argument: `downward' for below,
3743     `upward' for above, or `none' for don't pad it.
3744     Default is below for small data on big-endian machines; else above.  */
3745  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3746
3747  /* Invert direction if stack is post-decrement.
3748     FIXME: why?  */
3749  if (STACK_PUSH_CODE == POST_DEC)
3750    if (where_pad != none)
3751      where_pad = (where_pad == downward ? upward : downward);
3752
3753  xinner = x = protect_from_queue (x, 0);
3754
3755  if (mode == BLKmode)
3756    {
3757      /* Copy a block into the stack, entirely or partially.  */
3758
3759      rtx temp;
3760      int used = partial * UNITS_PER_WORD;
3761      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3762      int skip;
3763
3764      if (size == 0)
3765	abort ();
3766
3767      used -= offset;
3768
3769      /* USED is now the # of bytes we need not copy to the stack
3770	 because registers will take care of them.  */
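      /* For example, with the hypothetical values of 3 partial words of
	 4 bytes each and a 64-bit PARM_BOUNDARY, USED starts at 12 and is
	 rounded down to 8, so the first 8 bytes are assumed to be handled
	 in registers and we copy X starting at byte 8.  */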
3771
3772      if (partial != 0)
3773	xinner = adjust_address (xinner, BLKmode, used);
3774
3775      /* If the partial register-part of the arg counts in its stack size,
3776	 skip the part of stack space corresponding to the registers.
3777	 Otherwise, start copying to the beginning of the stack space,
3778	 by setting SKIP to 0.  */
3779      skip = (reg_parm_stack_space == 0) ? 0 : used;
3780
3781#ifdef PUSH_ROUNDING
3782      /* Do it with several push insns if that doesn't take lots of insns
3783	 and if there is no difficulty with push insns that skip bytes
3784	 on the stack for alignment purposes.  */
3785      if (args_addr == 0
3786	  && PUSH_ARGS
3787	  && GET_CODE (size) == CONST_INT
3788	  && skip == 0
3789	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3790	  /* Here we avoid the case of a structure whose weak alignment
3791	     forces many pushes of a small amount of data,
3792	     where such small pushes do rounding that causes trouble.  */
3793	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3794	      || align >= BIGGEST_ALIGNMENT
3795	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3796		  == (align / BITS_PER_UNIT)))
3797	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3798	{
3799	  /* Push padding now if padding above and stack grows down,
3800	     or if padding below and stack grows up.
3801	     But if space already allocated, this has already been done.  */
3802	  if (extra && args_addr == 0
3803	      && where_pad != none && where_pad != stack_direction)
3804	    anti_adjust_stack (GEN_INT (extra));
3805
3806	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3807	}
3808      else
3809#endif /* PUSH_ROUNDING  */
3810	{
3811	  rtx target;
3812
3813	  /* Otherwise make space on the stack and copy the data
3814	     to the address of that space.  */
3815
3816	  /* Deduct words put into registers from the size we must copy.  */
3817	  if (partial != 0)
3818	    {
3819	      if (GET_CODE (size) == CONST_INT)
3820		size = GEN_INT (INTVAL (size) - used);
3821	      else
3822		size = expand_binop (GET_MODE (size), sub_optab, size,
3823				     GEN_INT (used), NULL_RTX, 0,
3824				     OPTAB_LIB_WIDEN);
3825	    }
3826
3827	  /* Get the address of the stack space.
3828	     In this case, we do not deal with EXTRA separately.
3829	     A single stack adjust will do.  */
3830	  if (! args_addr)
3831	    {
3832	      temp = push_block (size, extra, where_pad == downward);
3833	      extra = 0;
3834	    }
3835	  else if (GET_CODE (args_so_far) == CONST_INT)
3836	    temp = memory_address (BLKmode,
3837				   plus_constant (args_addr,
3838						  skip + INTVAL (args_so_far)));
3839	  else
3840	    temp = memory_address (BLKmode,
3841				   plus_constant (gen_rtx_PLUS (Pmode,
3842								args_addr,
3843								args_so_far),
3844						  skip));
3845
3846	  if (!ACCUMULATE_OUTGOING_ARGS)
3847	    {
3848	      /* If the source is referenced relative to the stack pointer,
3849		 copy it to another register to stabilize it.  We do not need
3850		 to do this if we know that we won't be changing sp.  */
3851
3852	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3853		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3854		temp = copy_to_reg (temp);
3855	    }
3856
3857	  target = gen_rtx_MEM (BLKmode, temp);
3858
3859	  if (type != 0)
3860	    {
3861	      set_mem_attributes (target, type, 1);
3862	      /* Function incoming arguments may overlap with sibling call
3863		 outgoing arguments and we cannot allow reordering of reads
3864		 from function arguments with stores to outgoing arguments
3865		 of sibling calls.  */
3866	      set_mem_alias_set (target, 0);
3867	    }
3868
3869	  /* ALIGN may well be stricter than TYPE's own alignment, e.g. due to
3870	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3871	  set_mem_align (target, align);
3872
3873	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3874	}
3875    }
3876  else if (partial > 0)
3877    {
3878      /* Scalar partly in registers.  */
3879
3880      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3881      int i;
3882      int not_stack;
3883      /* # words of start of argument
3884	 that we must make space for but need not store.  */
3885      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3886      int args_offset = INTVAL (args_so_far);
3887      int skip;
3888
3889      /* Push padding now if padding above and stack grows down,
3890	 or if padding below and stack grows up.
3891	 But if space already allocated, this has already been done.  */
3892      if (extra && args_addr == 0
3893	  && where_pad != none && where_pad != stack_direction)
3894	anti_adjust_stack (GEN_INT (extra));
3895
3896      /* If we make space by pushing it, we might as well push
3897	 the real data.  Otherwise, we can leave OFFSET nonzero
3898	 and leave the space uninitialized.  */
3899      if (args_addr == 0)
3900	offset = 0;
3901
3902      /* Now NOT_STACK gets the number of words that we don't need to
3903	 allocate on the stack.  */
3904      not_stack = partial - offset;
3905
3906      /* If the partial register-part of the arg counts in its stack size,
3907	 skip the part of stack space corresponding to the registers.
3908	 Otherwise, start copying to the beginning of the stack space,
3909	 by setting SKIP to 0.  */
3910      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3911
3912      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3913	x = validize_mem (force_const_mem (mode, x));
3914
3915      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3916	 SUBREGs of such registers are not allowed.  */
3917      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3918	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3919	x = copy_to_reg (x);
3920
3921      /* Loop over all the words allocated on the stack for this arg.  */
3922      /* We can do it by words, because any scalar bigger than a word
3923	 has a size that is a multiple of a word.  */
3924#ifndef PUSH_ARGS_REVERSED
3925      for (i = not_stack; i < size; i++)
3926#else
3927      for (i = size - 1; i >= not_stack; i--)
3928#endif
3929	if (i >= not_stack + offset)
3930	  emit_push_insn (operand_subword_force (x, i, mode),
3931			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3932			  0, args_addr,
3933			  GEN_INT (args_offset + ((i - not_stack + skip)
3934						  * UNITS_PER_WORD)),
3935			  reg_parm_stack_space, alignment_pad);
3936    }
3937  else
3938    {
3939      rtx addr;
3940      rtx target = NULL_RTX;
3941      rtx dest;
3942
3943      /* Push padding now if padding above and stack grows down,
3944	 or if padding below and stack grows up.
3945	 But if space already allocated, this has already been done.  */
3946      if (extra && args_addr == 0
3947	  && where_pad != none && where_pad != stack_direction)
3948	anti_adjust_stack (GEN_INT (extra));
3949
3950#ifdef PUSH_ROUNDING
3951      if (args_addr == 0 && PUSH_ARGS)
3952	emit_single_push_insn (mode, x, type);
3953      else
3954#endif
3955	{
3956	  if (GET_CODE (args_so_far) == CONST_INT)
3957	    addr
3958	      = memory_address (mode,
3959				plus_constant (args_addr,
3960					       INTVAL (args_so_far)));
3961	  else
3962	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3963						       args_so_far));
3964	  target = addr;
3965	  dest = gen_rtx_MEM (mode, addr);
3966	  if (type != 0)
3967	    {
3968	      set_mem_attributes (dest, type, 1);
3969	      /* Function incoming arguments may overlap with sibling call
3970		 outgoing arguments and we cannot allow reordering of reads
3971		 from function arguments with stores to outgoing arguments
3972		 of sibling calls.  */
3973	      set_mem_alias_set (dest, 0);
3974	    }
3975
3976	  emit_move_insn (dest, x);
3977	}
3978    }
3979
3980  /* If part should go in registers, copy that part
3981     into the appropriate registers.  Do this now, at the end,
3982     since mem-to-mem copies above may do function calls.  */
3983  if (partial > 0 && reg != 0)
3984    {
3985      /* Handle calls that pass values in multiple non-contiguous locations.
3986	 The Irix 6 ABI has examples of this.  */
3987      if (GET_CODE (reg) == PARALLEL)
3988	emit_group_load (reg, x, -1);  /* ??? size? */
3989      else
3990	move_block_to_reg (REGNO (reg), x, partial, mode);
3991    }
3992
3993  if (extra && args_addr == 0 && where_pad == stack_direction)
3994    anti_adjust_stack (GEN_INT (extra));
3995
3996  if (alignment_pad && args_addr == 0)
3997    anti_adjust_stack (alignment_pad);
3998}
3999
4000/* Return X if X can be used as a subtarget in a sequence of arithmetic
4001   operations.  */
4002
4003static rtx
4004get_subtarget (x)
4005     rtx x;
4006{
4007  return ((x == 0
4008	   /* Only registers can be subtargets.  */
4009	   || GET_CODE (x) != REG
4010	   /* If the register is readonly, it can't be set more than once.  */
4011	   || RTX_UNCHANGING_P (x)
4012	   /* Don't use hard regs to avoid extending their life.  */
4013	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4014	   /* Avoid subtargets inside loops,
4015	      since they hide some invariant expressions.  */
4016	   || preserve_subexpressions_p ())
4017	  ? 0 : x);
4018}
4019
4020/* Expand an assignment that stores the value of FROM into TO.
4021   If WANT_VALUE is nonzero, return an rtx for the value of TO.
4022   (This may contain a QUEUED rtx;
4023   if the value is constant, this rtx is a constant.)
4024   Otherwise, the returned value is NULL_RTX.
4025
4026   SUGGEST_REG is no longer actually used.
4027   It used to mean, copy the value through a register
4028   and return that register, if that is possible.
4029   We now use WANT_VALUE to decide whether to do this.  */
4030
4031rtx
4032expand_assignment (to, from, want_value, suggest_reg)
4033     tree to, from;
4034     int want_value;
4035     int suggest_reg ATTRIBUTE_UNUSED;
4036{
4037  rtx to_rtx = 0;
4038  rtx result;
4039
4040  /* Don't crash if the lhs of the assignment was erroneous.  */
4041
4042  if (TREE_CODE (to) == ERROR_MARK)
4043    {
4044      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4045      return want_value ? result : NULL_RTX;
4046    }
4047
4048  /* Assignment of a structure component needs special treatment
4049     if the structure component's rtx is not simply a MEM.
4050     Assignment of an array element at a constant index, and assignment of
4051     an array element in an unaligned packed structure field, have the same
4052     problem.  */
4053
4054  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4055      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4056      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4057    {
4058      enum machine_mode mode1;
4059      HOST_WIDE_INT bitsize, bitpos;
4060      rtx orig_to_rtx;
4061      tree offset;
4062      int unsignedp;
4063      int volatilep = 0;
4064      tree tem;
4065
4066      push_temp_slots ();
4067      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4068				 &unsignedp, &volatilep);
4069
4070      /* If we are going to use store_bit_field and extract_bit_field,
4071	 make sure to_rtx will be safe for multiple use.  */
4072
4073      if (mode1 == VOIDmode && want_value)
4074	tem = stabilize_reference (tem);
4075
4076      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4077
4078      if (offset != 0)
4079	{
4080	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4081
4082	  if (GET_CODE (to_rtx) != MEM)
4083	    abort ();
4084
4085#ifdef POINTERS_EXTEND_UNSIGNED
4086	  if (GET_MODE (offset_rtx) != Pmode)
4087	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4088#else
4089	  if (GET_MODE (offset_rtx) != ptr_mode)
4090	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4091#endif
4092
4093	  /* A constant address in TO_RTX can have VOIDmode; we must not try
4094	     to call force_reg in that case, so avoid it.  */
4095	  if (GET_CODE (to_rtx) == MEM
4096	      && GET_MODE (to_rtx) == BLKmode
4097	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4098	      && bitsize > 0
4099	      && (bitpos % bitsize) == 0
4100	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4101	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4102	    {
4103	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4104	      bitpos = 0;
4105	    }
4106
4107	  to_rtx = offset_address (to_rtx, offset_rtx,
4108				   highest_pow2_factor_for_type (TREE_TYPE (to),
4109								 offset));
4110	}
4111
4112      if (GET_CODE (to_rtx) == MEM)
4113	{
4114	  /* If the field is at offset zero, we could have been given the
4115	     DECL_RTX of the parent struct.  Don't munge it.  */
4116	  to_rtx = shallow_copy_rtx (to_rtx);
4117
4118	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4119	}
4120
4121      /* Deal with volatile and readonly fields.  The former is only done
4122	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4123      if (volatilep && GET_CODE (to_rtx) == MEM)
4124	{
4125	  if (to_rtx == orig_to_rtx)
4126	    to_rtx = copy_rtx (to_rtx);
4127	  MEM_VOLATILE_P (to_rtx) = 1;
4128	}
4129
4130      if (TREE_CODE (to) == COMPONENT_REF
4131	  && TREE_READONLY (TREE_OPERAND (to, 1)))
4132	{
4133	  if (to_rtx == orig_to_rtx)
4134	    to_rtx = copy_rtx (to_rtx);
4135	  RTX_UNCHANGING_P (to_rtx) = 1;
4136	}
4137
4138      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4139	{
4140	  if (to_rtx == orig_to_rtx)
4141	    to_rtx = copy_rtx (to_rtx);
4142	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4143	}
4144
4145      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4146			    (want_value
4147			     /* Spurious cast for HPUX compiler.  */
4148			     ? ((enum machine_mode)
4149				TYPE_MODE (TREE_TYPE (to)))
4150			     : VOIDmode),
4151			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
4152
4153      preserve_temp_slots (result);
4154      free_temp_slots ();
4155      pop_temp_slots ();
4156
4157      /* If the value is meaningful, convert RESULT to the proper mode.
4158	 Otherwise, return nothing.  */
4159      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4160					  TYPE_MODE (TREE_TYPE (from)),
4161					  result,
4162					  TREE_UNSIGNED (TREE_TYPE (to)))
4163	      : NULL_RTX);
4164    }
4165
4166  /* If the rhs is a function call and its value is not an aggregate,
4167     call the function before we start to compute the lhs.
4168     This is needed for correct code for cases such as
4169     val = setjmp (buf) on machines where reference to val
4170     requires loading up part of an address in a separate insn.
4171
4172     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4173     since it might be a promoted variable where the zero- or sign- extension
4174     needs to be done.  Handling this in the normal way is safe because no
4175     computation is done before the call.  */
4176  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4177      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4178      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4179	    && GET_CODE (DECL_RTL (to)) == REG))
4180    {
4181      rtx value;
4182
4183      push_temp_slots ();
4184      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4185      if (to_rtx == 0)
4186	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4187
4188      /* Handle calls that return values in multiple non-contiguous locations.
4189	 The Irix 6 ABI has examples of this.  */
4190      if (GET_CODE (to_rtx) == PARALLEL)
4191	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4192      else if (GET_MODE (to_rtx) == BLKmode)
4193	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4194      else
4195	{
4196#ifdef POINTERS_EXTEND_UNSIGNED
4197	  if (POINTER_TYPE_P (TREE_TYPE (to))
4198	      && GET_MODE (to_rtx) != GET_MODE (value))
4199	    value = convert_memory_address (GET_MODE (to_rtx), value);
4200#endif
4201	  emit_move_insn (to_rtx, value);
4202	}
4203      preserve_temp_slots (to_rtx);
4204      free_temp_slots ();
4205      pop_temp_slots ();
4206      return want_value ? to_rtx : NULL_RTX;
4207    }
4208
4209  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4210     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4211
4212  if (to_rtx == 0)
4213    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4214
4215  /* Don't move directly into a return register.  */
4216  if (TREE_CODE (to) == RESULT_DECL
4217      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4218    {
4219      rtx temp;
4220
4221      push_temp_slots ();
4222      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4223
4224      if (GET_CODE (to_rtx) == PARALLEL)
4225	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4226      else
4227	emit_move_insn (to_rtx, temp);
4228
4229      preserve_temp_slots (to_rtx);
4230      free_temp_slots ();
4231      pop_temp_slots ();
4232      return want_value ? to_rtx : NULL_RTX;
4233    }
4234
4235  /* In case we are returning the contents of an object which overlaps
4236     the place the value is being stored, use a safe function when copying
4237     a value through a pointer into a structure value return block.  */
4238  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4239      && current_function_returns_struct
4240      && !current_function_returns_pcc_struct)
4241    {
4242      rtx from_rtx, size;
4243
4244      push_temp_slots ();
4245      size = expr_size (from);
4246      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4247
4248      if (TARGET_MEM_FUNCTIONS)
4249	emit_library_call (memmove_libfunc, LCT_NORMAL,
4250			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4251			   XEXP (from_rtx, 0), Pmode,
4252			   convert_to_mode (TYPE_MODE (sizetype),
4253					    size, TREE_UNSIGNED (sizetype)),
4254			   TYPE_MODE (sizetype));
4255      else
4256        emit_library_call (bcopy_libfunc, LCT_NORMAL,
4257			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4258			   XEXP (to_rtx, 0), Pmode,
4259			   convert_to_mode (TYPE_MODE (integer_type_node),
4260					    size,
4261					    TREE_UNSIGNED (integer_type_node)),
4262			   TYPE_MODE (integer_type_node));
4263
4264      preserve_temp_slots (to_rtx);
4265      free_temp_slots ();
4266      pop_temp_slots ();
4267      return want_value ? to_rtx : NULL_RTX;
4268    }
4269
4270  /* Compute FROM and store the value in the rtx we got.  */
4271
4272  push_temp_slots ();
4273  result = store_expr (from, to_rtx, want_value);
4274  preserve_temp_slots (result);
4275  free_temp_slots ();
4276  pop_temp_slots ();
4277  return want_value ? result : NULL_RTX;
4278}
4279
4280/* Generate code for computing expression EXP,
4281   and storing the value into TARGET.
4282   TARGET may contain a QUEUED rtx.
4283
4284   If WANT_VALUE & 1 is nonzero, return a copy of the value
4285   not in TARGET, so that we can be sure to use the proper
4286   value in a containing expression even if TARGET has something
4287   else stored in it.  If possible, we copy the value through a pseudo
4288   and return that pseudo.  Or, if the value is constant, we try to
4289   return the constant.  In some cases, we return a pseudo
4290   copied *from* TARGET.
4291
4292   If the mode is BLKmode then we may return TARGET itself.
4293   It turns out that in BLKmode it doesn't cause a problem,
4294   because C has no operators that could combine two different
4295   assignments into the same BLKmode object with different values
4296   with no sequence point.  Will other languages need this to
4297   be more thorough?
4298
4299   If WANT_VALUE & 1 is 0, we return NULL, to make sure
4300   to catch quickly any cases where the caller uses the value
4301   and fails to set WANT_VALUE.
4302
4303   If WANT_VALUE & 2 is set, this is a store into a call param on the
4304   stack, and block moves may need to be treated specially.  */
4305
4306rtx
4307store_expr (exp, target, want_value)
4308     tree exp;
4309     rtx target;
4310     int want_value;
4311{
4312  rtx temp;
4313  int dont_return_target = 0;
4314  int dont_store_target = 0;
4315
4316  if (VOID_TYPE_P (TREE_TYPE (exp)))
4317    {
4318      /* C++ can generate ?: expressions with a throw expression in one
4319	 branch and an rvalue in the other. Here, we resolve attempts to
4320	 store the throw expression's nonexistent result.  */
4321      if (want_value)
4322	abort ();
4323      expand_expr (exp, const0_rtx, VOIDmode, 0);
4324      return NULL_RTX;
4325    }
4326  if (TREE_CODE (exp) == COMPOUND_EXPR)
4327    {
4328      /* Perform first part of compound expression, then assign from second
4329	 part.  */
4330      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4331		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4332      emit_queue ();
4333      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4334    }
4335  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4336    {
4337      /* For conditional expression, get safe form of the target.  Then
4338	 test the condition, doing the appropriate assignment on either
4339	 side.  This avoids the creation of unnecessary temporaries.
4340	 For non-BLKmode, it is more efficient not to do this.  */
4341
4342      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4343
4344      emit_queue ();
4345      target = protect_from_queue (target, 1);
4346
4347      do_pending_stack_adjust ();
4348      NO_DEFER_POP;
4349      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4350      start_cleanup_deferral ();
4351      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4352      end_cleanup_deferral ();
4353      emit_queue ();
4354      emit_jump_insn (gen_jump (lab2));
4355      emit_barrier ();
4356      emit_label (lab1);
4357      start_cleanup_deferral ();
4358      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4359      end_cleanup_deferral ();
4360      emit_queue ();
4361      emit_label (lab2);
4362      OK_DEFER_POP;
4363
4364      return want_value & 1 ? target : NULL_RTX;
4365    }
4366  else if (queued_subexp_p (target))
4367    /* If target contains a postincrement, let's not risk
4368       using it as the place to generate the rhs.  */
4369    {
4370      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4371	{
4372	  /* Expand EXP into a new pseudo.  */
4373	  temp = gen_reg_rtx (GET_MODE (target));
4374	  temp = expand_expr (exp, temp, GET_MODE (target),
4375			      (want_value & 2
4376			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4377	}
4378      else
4379	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4380			    (want_value & 2
4381			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4382
4383      /* If target is volatile, ANSI requires accessing the value
4384	 *from* the target, if it is accessed.  So make that happen.
4385	 In no case return the target itself.  */
4386      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4387	dont_return_target = 1;
4388    }
4389  else if ((want_value & 1) != 0
4390	   && GET_CODE (target) == MEM
4391	   && ! MEM_VOLATILE_P (target)
4392	   && GET_MODE (target) != BLKmode)
4393    /* If target is in memory and caller wants value in a register instead,
4394       arrange that.  Pass TARGET as target for expand_expr so that,
4395       if EXP is another assignment, WANT_VALUE will be nonzero for it.
4396       We know expand_expr will not use the target in that case.
4397       Don't do this if TARGET is volatile because we are supposed
4398       to write it and then read it.  */
4399    {
4400      temp = expand_expr (exp, target, GET_MODE (target),
4401			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4402      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4403	{
4404	  /* If TEMP is already in the desired TARGET, only copy it from
4405	     memory and don't store it there again.  */
4406	  if (temp == target
4407	      || (rtx_equal_p (temp, target)
4408		  && ! side_effects_p (temp) && ! side_effects_p (target)))
4409	    dont_store_target = 1;
4410	  temp = copy_to_reg (temp);
4411	}
4412      dont_return_target = 1;
4413    }
4414  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4415    /* If this is a scalar in a register that is stored in a wider mode
4416       than the declared mode, compute the result into its declared mode
4417       and then convert to the wider mode.  Our value is the computed
4418       expression.  */
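    /* For instance, a 'short' variable promoted into a full-word pseudo:
       EXP is computed in the declared narrower mode and then sign- or
       zero-extended into SUBREG_REG (target), according to
       SUBREG_PROMOTED_UNSIGNED_P.  */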
4419    {
4420      rtx inner_target = 0;
4421
4422      /* If we don't want a value, we can do the conversion inside EXP,
4423	 which will often result in some optimizations.  Do the conversion
4424	 in two steps: first change the signedness, if needed, then
4425	 the extend.  But don't do this if the type of EXP is a subtype
4426	 of something else since then the conversion might involve
4427	 more than just converting modes.  */
4428      if ((want_value & 1) == 0
4429	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4430	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4431	{
4432	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4433	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4434	    exp = convert
4435	      ((*lang_hooks.types.signed_or_unsigned_type)
4436	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4437
4438	  exp = convert ((*lang_hooks.types.type_for_mode)
4439			 (GET_MODE (SUBREG_REG (target)),
4440			  SUBREG_PROMOTED_UNSIGNED_P (target)),
4441			 exp);
4442
4443	  inner_target = SUBREG_REG (target);
4444	}
4445
4446      temp = expand_expr (exp, inner_target, VOIDmode,
4447			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4448
4449      /* If TEMP is a volatile MEM and we want a result value, make
4450	 the access now so it gets done only once.  Likewise if
4451	 it contains TARGET.  */
4452      if (GET_CODE (temp) == MEM && (want_value & 1) != 0
4453	  && (MEM_VOLATILE_P (temp)
4454	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4455	temp = copy_to_reg (temp);
4456
4457      /* If TEMP is a VOIDmode constant, use convert_modes to make
4458	 sure that we properly convert it.  */
4459      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4460	{
4461	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4462				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4463	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4464			        GET_MODE (target), temp,
4465			        SUBREG_PROMOTED_UNSIGNED_P (target));
4466	}
4467
4468      convert_move (SUBREG_REG (target), temp,
4469		    SUBREG_PROMOTED_UNSIGNED_P (target));
4470
4471      /* If we promoted a constant, change the mode back down to match
4472	 target.  Otherwise, the caller might get confused by a result whose
4473	 mode is larger than expected.  */
4474
4475      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4476	{
4477	  if (GET_MODE (temp) != VOIDmode)
4478	    {
4479	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4480	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4481	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
4482		SUBREG_PROMOTED_UNSIGNED_P (target));
4483	    }
4484	  else
4485	    temp = convert_modes (GET_MODE (target),
4486				  GET_MODE (SUBREG_REG (target)),
4487				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4488	}
4489
4490      return want_value & 1 ? temp : NULL_RTX;
4491    }
4492  else
4493    {
4494      temp = expand_expr (exp, target, GET_MODE (target),
4495			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4496      /* Return TARGET if it's a specified hardware register.
4497	 If TARGET is a volatile mem ref, either return TARGET
4498	 or return a reg copied *from* TARGET; ANSI requires this.
4499
4500	 Otherwise, if TEMP is not TARGET, return TEMP
4501	 if it is constant (for efficiency),
4502	 or if we really want the correct value.  */
4503      if (!(target && GET_CODE (target) == REG
4504	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4505	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4506	  && ! rtx_equal_p (temp, target)
4507	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
4508	dont_return_target = 1;
4509    }
4510
4511  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4512     the same as that of TARGET, adjust the constant.  This is needed, for
4513     example, in case it is a CONST_DOUBLE and we want only a word-sized
4514     value.  */
4515  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4516      && TREE_CODE (exp) != ERROR_MARK
4517      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4518    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4519			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4520
4521  /* If value was not generated in the target, store it there.
4522     Convert the value to TARGET's type first if necessary.
4523     If TEMP and TARGET compare equal according to rtx_equal_p, but
4524     one or both of them are volatile memory refs, we have to distinguish
4525     two cases:
4526     - expand_expr has used TARGET.  In this case, we must not generate
4527       another copy.  This can be detected by TARGET being equal according
4528       to == .
4529     - expand_expr has not used TARGET - that means that the source just
4530       happens to have the same RTX form.  Since temp will have been created
4531       by expand_expr, it will compare unequal according to == .
4532       We must generate a copy in this case, to reach the correct number
4533       of volatile memory references.  */
4534
4535  if ((! rtx_equal_p (temp, target)
4536       || (temp != target && (side_effects_p (temp)
4537			      || side_effects_p (target))))
4538      && TREE_CODE (exp) != ERROR_MARK
4539      && ! dont_store_target
4540	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4541	    but TARGET is not a valid memory reference, TEMP will differ
4542	    from TARGET although it is really the same location.  */
4543      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4544	  || target != DECL_RTL_IF_SET (exp))
4545      /* If there's nothing to copy, don't bother.  Don't call expr_size
4546	 unless necessary, because the expr_size hook of some front ends (C++)
4547	 aborts on objects that are not supposed to be bit-copied or
4548	 bit-initialized.  */
4549      && expr_size (exp) != const0_rtx)
4550    {
4551      target = protect_from_queue (target, 1);
4552      if (GET_MODE (temp) != GET_MODE (target)
4553	  && GET_MODE (temp) != VOIDmode)
4554	{
4555	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4556	  if (dont_return_target)
4557	    {
4558	      /* In this case, we will return TEMP,
4559		 so make sure it has the proper mode.
4560		 But don't forget to store the value into TARGET.  */
4561	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4562	      emit_move_insn (target, temp);
4563	    }
4564	  else
4565	    convert_move (target, temp, unsignedp);
4566	}
4567
4568      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4569	{
4570	  /* Handle copying a string constant into an array.  The string
4571	     constant may be shorter than the array.  So copy just the string's
4572	     actual length, and clear the rest.  First get the size of the data
4573	     type of the string, which is actually the size of the target.  */
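	  /* For instance, for a char array of eight bytes initialized from
	     a three-character string literal, only the literal's bytes are
	     copied and the remaining bytes of TARGET are cleared below.  */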
4574	  rtx size = expr_size (exp);
4575
4576	  if (GET_CODE (size) == CONST_INT
4577	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4578	    emit_block_move (target, temp, size,
4579			     (want_value & 2
4580			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4581	  else
4582	    {
4583	      /* Compute the size of the data to copy from the string.  */
4584	      tree copy_size
4585		= size_binop (MIN_EXPR,
4586			      make_tree (sizetype, size),
4587			      size_int (TREE_STRING_LENGTH (exp)));
4588	      rtx copy_size_rtx
4589		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4590			       (want_value & 2
4591				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4592	      rtx label = 0;
4593
4594	      /* Copy that much.  */
4595	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4596					       TREE_UNSIGNED (sizetype));
4597	      emit_block_move (target, temp, copy_size_rtx,
4598			       (want_value & 2
4599				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4600
4601	      /* Figure out how much is left in TARGET that we have to clear.
4602		 Do all calculations in ptr_mode.  */
4603	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4604		{
4605		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4606		  target = adjust_address (target, BLKmode,
4607					   INTVAL (copy_size_rtx));
4608		}
4609	      else
4610		{
4611		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4612				       copy_size_rtx, NULL_RTX, 0,
4613				       OPTAB_LIB_WIDEN);
4614
4615#ifdef POINTERS_EXTEND_UNSIGNED
4616		  if (GET_MODE (copy_size_rtx) != Pmode)
4617		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4618						     TREE_UNSIGNED (sizetype));
4619#endif
4620
4621		  target = offset_address (target, copy_size_rtx,
4622					   highest_pow2_factor (copy_size));
4623		  label = gen_label_rtx ();
4624		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4625					   GET_MODE (size), 0, label);
4626		}
4627
4628	      if (size != const0_rtx)
4629		clear_storage (target, size);
4630
4631	      if (label)
4632		emit_label (label);
4633	    }
4634	}
4635      /* Handle calls that return values in multiple non-contiguous locations.
4636	 The Irix 6 ABI has examples of this.  */
4637      else if (GET_CODE (target) == PARALLEL)
4638	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4639      else if (GET_MODE (temp) == BLKmode)
4640	emit_block_move (target, temp, expr_size (exp),
4641			 (want_value & 2
4642			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4643      else
4644	emit_move_insn (target, temp);
4645    }
4646
4647  /* If we don't want a value, return NULL_RTX.  */
4648  if ((want_value & 1) == 0)
4649    return NULL_RTX;
4650
4651  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4652     ??? The latter test doesn't seem to make sense.  */
4653  else if (dont_return_target && GET_CODE (temp) != MEM)
4654    return temp;
4655
4656  /* Return TARGET itself if it is a hard register.  */
4657  else if ((want_value & 1) != 0
4658	   && GET_MODE (target) != BLKmode
4659	   && ! (GET_CODE (target) == REG
4660		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4661    return copy_to_reg (target);
4662
4663  else
4664    return target;
4665}
4666
4667/* Return 1 if EXP just contains zeros.  */
4668
4669static int
4670is_zeros_p (exp)
4671     tree exp;
4672{
4673  tree elt;
4674
4675  switch (TREE_CODE (exp))
4676    {
4677    case CONVERT_EXPR:
4678    case NOP_EXPR:
4679    case NON_LVALUE_EXPR:
4680    case VIEW_CONVERT_EXPR:
4681      return is_zeros_p (TREE_OPERAND (exp, 0));
4682
4683    case INTEGER_CST:
4684      return integer_zerop (exp);
4685
4686    case COMPLEX_CST:
4687      return
4688	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4689
4690    case REAL_CST:
4691      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4692
4693    case VECTOR_CST:
4694      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4695	   elt = TREE_CHAIN (elt))
4696	if (!is_zeros_p (TREE_VALUE (elt)))
4697	  return 0;
4698
4699      return 1;
4700
4701    case CONSTRUCTOR:
4702      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4703	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4704      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4705	if (! is_zeros_p (TREE_VALUE (elt)))
4706	  return 0;
4707
4708      return 1;
4709
4710    default:
4711      return 0;
4712    }
4713}
4714
4715/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4716
4717static int
4718mostly_zeros_p (exp)
4719     tree exp;
4720{
4721  if (TREE_CODE (exp) == CONSTRUCTOR)
4722    {
4723      int elts = 0, zeros = 0;
4724      tree elt = CONSTRUCTOR_ELTS (exp);
4725      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4726	{
4727	  /* If there are no ranges of true bits, it is all zero.  */
4728	  return elt == NULL_TREE;
4729	}
4730      for (; elt; elt = TREE_CHAIN (elt))
4731	{
4732	  /* We do not handle the case where the index is a RANGE_EXPR,
4733	     so the statistic will be somewhat inaccurate.
4734	     We do make a more accurate count in store_constructor itself,
4735	     and since this function is only used for nested array elements,
4736	     this should be close enough.  */
4737	  if (mostly_zeros_p (TREE_VALUE (elt)))
4738	    zeros++;
4739	  elts++;
4740	}
4741
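      /* Mostly zero means at least three quarters of the elements are
	 themselves (mostly) zero, e.g. 6 of 8.  */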
4742      return 4 * zeros >= 3 * elts;
4743    }
4744
4745  return is_zeros_p (exp);
4746}
4747
4748/* Helper function for store_constructor.
4749   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4750   TYPE is the type of the CONSTRUCTOR, not the element type.
4751   CLEARED is as for store_constructor.
4752   ALIAS_SET is the alias set to use for any stores.
4753
4754   This provides a recursive shortcut back to store_constructor when it isn't
4755   necessary to go through store_field.  This is so that we can pass through
4756   the cleared field to let store_constructor know that we may not have to
4757   clear a substructure if the outer structure has already been cleared.  */
4758
4759static void
4760store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4761			 alias_set)
4762     rtx target;
4763     unsigned HOST_WIDE_INT bitsize;
4764     HOST_WIDE_INT bitpos;
4765     enum machine_mode mode;
4766     tree exp, type;
4767     int cleared;
4768     int alias_set;
4769{
4770  if (TREE_CODE (exp) == CONSTRUCTOR
4771      && bitpos % BITS_PER_UNIT == 0
4772      /* If we have a nonzero bitpos for a register target, then we just
4773	 let store_field do the bitfield handling.  This is unlikely to
4774	 generate unnecessary clear instructions anyway.  */
4775      && (bitpos == 0 || GET_CODE (target) == MEM))
4776    {
4777      if (GET_CODE (target) == MEM)
4778	target
4779	  = adjust_address (target,
4780			    GET_MODE (target) == BLKmode
4781			    || 0 != (bitpos
4782				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4783			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4784
4785
4786      /* Update the alias set, if required.  */
4787      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4788	  && MEM_ALIAS_SET (target) != 0)
4789	{
4790	  target = copy_rtx (target);
4791	  set_mem_alias_set (target, alias_set);
4792	}
4793
4794      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4795    }
4796  else
4797    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4798		 alias_set);
4799}
4800
4801/* Store the value of constructor EXP into the rtx TARGET.
4802   TARGET is either a REG or a MEM; we know it cannot conflict, since
4803   safe_from_p has been called.
4804   CLEARED is true if TARGET is known to have been zeroed.
4805   SIZE is the number of bytes of TARGET we are allowed to modify: this
4806   may not be the same as the size of EXP if we are assigning to a field
4807   which has been packed to exclude padding bits.  */
4808
4809static void
4810store_constructor (exp, target, cleared, size)
4811     tree exp;
4812     rtx target;
4813     int cleared;
4814     HOST_WIDE_INT size;
4815{
4816  tree type = TREE_TYPE (exp);
4817#ifdef WORD_REGISTER_OPERATIONS
4818  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4819#endif
4820
4821  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4822      || TREE_CODE (type) == QUAL_UNION_TYPE)
4823    {
4824      tree elt;
4825
4826      /* If size is zero or the target is already cleared, do nothing.  */
4827      if (size == 0 || cleared)
4828	cleared = 1;
4829      /* We either clear the aggregate or indicate the value is dead.  */
4830      else if ((TREE_CODE (type) == UNION_TYPE
4831		|| TREE_CODE (type) == QUAL_UNION_TYPE)
4832	       && ! CONSTRUCTOR_ELTS (exp))
4833	/* If the constructor is empty, clear the union.  */
4834	{
4835	  clear_storage (target, expr_size (exp));
4836	  cleared = 1;
4837	}
4838
4839      /* If we are building a static constructor into a register,
4840	 set the initial value as zero so we can fold the value into
4841	 a constant.  But if more than one register is involved,
4842	 this probably loses.  */
4843      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4844	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4845	{
4846	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4847	  cleared = 1;
4848	}
4849
4850      /* If the constructor has fewer fields than the structure
4851	 or if we are initializing the structure to mostly zeros,
4852	 clear the whole structure first.  Don't do this if TARGET is a
4853	 register whose mode size isn't equal to SIZE since clear_storage
4854	 can't handle this case.  */
4855      else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4856		|| mostly_zeros_p (exp))
4857	       && (GET_CODE (target) != REG
4858		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4859		       == size)))
4860	{
4861	  clear_storage (target, GEN_INT (size));
4862	  cleared = 1;
4863	}
4864
4865      if (! cleared)
4866	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4867
4868      /* Store each element of the constructor into
4869	 the corresponding field of TARGET.  */
4870
4871      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4872	{
4873	  tree field = TREE_PURPOSE (elt);
4874	  tree value = TREE_VALUE (elt);
4875	  enum machine_mode mode;
4876	  HOST_WIDE_INT bitsize;
4877	  HOST_WIDE_INT bitpos = 0;
4878	  int unsignedp;
4879	  tree offset;
4880	  rtx to_rtx = target;
4881
4882	  /* Just ignore missing fields.
4883	     We cleared the whole structure, above,
4884	     if any fields are missing.  */
4885	  if (field == 0)
4886	    continue;
4887
4888	  if (cleared && is_zeros_p (value))
4889	    continue;
4890
4891	  if (host_integerp (DECL_SIZE (field), 1))
4892	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4893	  else
4894	    bitsize = -1;
4895
4896	  unsignedp = TREE_UNSIGNED (field);
4897	  mode = DECL_MODE (field);
4898	  if (DECL_BIT_FIELD (field))
4899	    mode = VOIDmode;
4900
4901	  offset = DECL_FIELD_OFFSET (field);
4902	  if (host_integerp (offset, 0)
4903	      && host_integerp (bit_position (field), 0))
4904	    {
4905	      bitpos = int_bit_position (field);
4906	      offset = 0;
4907	    }
4908	  else
4909	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4910
4911	  if (offset)
4912	    {
4913	      rtx offset_rtx;
4914
4915	      if (contains_placeholder_p (offset))
4916		offset = build (WITH_RECORD_EXPR, sizetype,
4917				offset, make_tree (TREE_TYPE (exp), target));
4918
4919	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4920	      if (GET_CODE (to_rtx) != MEM)
4921		abort ();
4922
4923#ifdef POINTERS_EXTEND_UNSIGNED
4924	      if (GET_MODE (offset_rtx) != Pmode)
4925		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4926#else
4927	      if (GET_MODE (offset_rtx) != ptr_mode)
4928		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4929#endif
4930
4931	      to_rtx = offset_address (to_rtx, offset_rtx,
4932				       highest_pow2_factor (offset));
4933	    }
4934
4935	  if (TREE_READONLY (field))
4936	    {
4937	      if (GET_CODE (to_rtx) == MEM)
4938		to_rtx = copy_rtx (to_rtx);
4939
4940	      RTX_UNCHANGING_P (to_rtx) = 1;
4941	    }
4942
4943#ifdef WORD_REGISTER_OPERATIONS
4944	  /* If this initializes a field that is smaller than a word, at the
4945	     start of a word, try to widen it to a full word.
4946	     This special case allows us to output C++ member function
4947	     initializations in a form that the optimizers can understand.  */
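	  /* For instance, a word-aligned 8-bit integer field initialized
	     with a constant is widened here to a full-word store (shifted
	     into place first on big-endian targets).  */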
4948	  if (GET_CODE (target) == REG
4949	      && bitsize < BITS_PER_WORD
4950	      && bitpos % BITS_PER_WORD == 0
4951	      && GET_MODE_CLASS (mode) == MODE_INT
4952	      && TREE_CODE (value) == INTEGER_CST
4953	      && exp_size >= 0
4954	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4955	    {
4956	      tree type = TREE_TYPE (value);
4957
4958	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4959		{
4960		  type = (*lang_hooks.types.type_for_size)
4961		    (BITS_PER_WORD, TREE_UNSIGNED (type));
4962		  value = convert (type, value);
4963		}
4964
4965	      if (BYTES_BIG_ENDIAN)
4966		value
4967		  = fold (build (LSHIFT_EXPR, type, value,
4968				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4969	      bitsize = BITS_PER_WORD;
4970	      mode = word_mode;
4971	    }
4972#endif
4973
4974	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4975	      && DECL_NONADDRESSABLE_P (field))
4976	    {
4977	      to_rtx = copy_rtx (to_rtx);
4978	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4979	    }
4980
4981	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4982				   value, type, cleared,
4983				   get_alias_set (TREE_TYPE (field)));
4984	}
4985    }
4986  else if (TREE_CODE (type) == ARRAY_TYPE
4987	   || TREE_CODE (type) == VECTOR_TYPE)
4988    {
4989      tree elt;
4990      int i;
4991      int need_to_clear;
4992      tree domain = TYPE_DOMAIN (type);
4993      tree elttype = TREE_TYPE (type);
4994      int const_bounds_p;
4995      HOST_WIDE_INT minelt = 0;
4996      HOST_WIDE_INT maxelt = 0;
4997
4998      /* Vectors are like arrays, but the domain is stored via an array
4999	 type indirectly.  */
5000      if (TREE_CODE (type) == VECTOR_TYPE)
5001	{
5002	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5003	     the same field as TYPE_DOMAIN, we are not guaranteed that
5004	     it always will.  */
5005	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5006	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5007	}
5008
5009      const_bounds_p = (TYPE_MIN_VALUE (domain)
5010			&& TYPE_MAX_VALUE (domain)
5011			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
5012			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
5013
5014      /* If we have constant bounds for the range of the type, get them.  */
5015      if (const_bounds_p)
5016	{
5017	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5018	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5019	}
5020
5021      /* If the constructor has fewer elements than the array,
5022         clear the whole array first.  Similarly if this is a static
5023         constructor of a non-BLKmode object.  */
5024      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5025	need_to_clear = 1;
5026      else
5027	{
5028	  HOST_WIDE_INT count = 0, zero_count = 0;
5029	  need_to_clear = ! const_bounds_p;
5030
5031	  /* This loop is a more accurate version of the loop in
5032	     mostly_zeros_p (it handles RANGE_EXPR in an index).
5033	     It is also needed to check for missing elements.  */
5034	  for (elt = CONSTRUCTOR_ELTS (exp);
5035	       elt != NULL_TREE && ! need_to_clear;
5036	       elt = TREE_CHAIN (elt))
5037	    {
5038	      tree index = TREE_PURPOSE (elt);
5039	      HOST_WIDE_INT this_node_count;
5040
5041	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5042		{
5043		  tree lo_index = TREE_OPERAND (index, 0);
5044		  tree hi_index = TREE_OPERAND (index, 1);
5045
5046		  if (! host_integerp (lo_index, 1)
5047		      || ! host_integerp (hi_index, 1))
5048		    {
5049		      need_to_clear = 1;
5050		      break;
5051		    }
5052
5053		  this_node_count = (tree_low_cst (hi_index, 1)
5054				     - tree_low_cst (lo_index, 1) + 1);
5055		}
5056	      else
5057		this_node_count = 1;
5058
5059	      count += this_node_count;
5060	      if (mostly_zeros_p (TREE_VALUE (elt)))
5061		zero_count += this_node_count;
5062	    }
5063
5064	  /* Clear the entire array first if there are any missing elements,
5065	     or if the incidence of zero elements is >= 75%.  */
5066	  if (! need_to_clear
5067	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5068	    need_to_clear = 1;
5069	}
5070
5071      if (need_to_clear && size > 0)
5072	{
5073	  if (! cleared)
5074	    {
5075	      if (REG_P (target))
5076		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5077	      else
5078		clear_storage (target, GEN_INT (size));
5079	    }
5080	  cleared = 1;
5081	}
5082      else if (REG_P (target))
5083	/* Inform later passes that the old value is dead.  */
5084	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5085
5086      /* Store each element of the constructor into
5087	 the corresponding element of TARGET, determined
5088	 by counting the elements.  */
5089      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5090	   elt;
5091	   elt = TREE_CHAIN (elt), i++)
5092	{
5093	  enum machine_mode mode;
5094	  HOST_WIDE_INT bitsize;
5095	  HOST_WIDE_INT bitpos;
5096	  int unsignedp;
5097	  tree value = TREE_VALUE (elt);
5098	  tree index = TREE_PURPOSE (elt);
5099	  rtx xtarget = target;
5100
5101	  if (cleared && is_zeros_p (value))
5102	    continue;
5103
5104	  unsignedp = TREE_UNSIGNED (elttype);
5105	  mode = TYPE_MODE (elttype);
5106	  if (mode == BLKmode)
5107	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5108		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
5109		       : -1);
5110	  else
5111	    bitsize = GET_MODE_BITSIZE (mode);
5112
5113	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5114	    {
5115	      tree lo_index = TREE_OPERAND (index, 0);
5116	      tree hi_index = TREE_OPERAND (index, 1);
5117	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5118	      struct nesting *loop;
5119	      HOST_WIDE_INT lo, hi, count;
5120	      tree position;
5121
5122	      /* If the range is constant and "small", unroll the loop.  */
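	      /* Here "small" means the target is not in memory, or the range
		 has at most two elements, or it covers at most 40 bytes of
		 element data.  */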
5123	      if (const_bounds_p
5124		  && host_integerp (lo_index, 0)
5125		  && host_integerp (hi_index, 0)
5126		  && (lo = tree_low_cst (lo_index, 0),
5127		      hi = tree_low_cst (hi_index, 0),
5128		      count = hi - lo + 1,
5129		      (GET_CODE (target) != MEM
5130		       || count <= 2
5131		       || (host_integerp (TYPE_SIZE (elttype), 1)
5132			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5133			       <= 40 * 8)))))
5134		{
5135		  lo -= minelt;  hi -= minelt;
5136		  for (; lo <= hi; lo++)
5137		    {
5138		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5139
5140		      if (GET_CODE (target) == MEM
5141			  && !MEM_KEEP_ALIAS_SET_P (target)
5142			  && TREE_CODE (type) == ARRAY_TYPE
5143			  && TYPE_NONALIASED_COMPONENT (type))
5144			{
5145			  target = copy_rtx (target);
5146			  MEM_KEEP_ALIAS_SET_P (target) = 1;
5147			}
5148
5149		      store_constructor_field
5150			(target, bitsize, bitpos, mode, value, type, cleared,
5151			 get_alias_set (elttype));
5152		    }
5153		}
5154	      else
5155		{
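		  /* The range is not a small constant one, so emit a loop
		     that stores VALUE into each element at run time, using
		     INDEX as the loop variable.  */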
5156		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5157		  loop_top = gen_label_rtx ();
5158		  loop_end = gen_label_rtx ();
5159
5160		  unsignedp = TREE_UNSIGNED (domain);
5161
5162		  index = build_decl (VAR_DECL, NULL_TREE, domain);
5163
5164		  index_r
5165		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5166						 &unsignedp, 0));
5167		  SET_DECL_RTL (index, index_r);
5168		  if (TREE_CODE (value) == SAVE_EXPR
5169		      && SAVE_EXPR_RTL (value) == 0)
5170		    {
5171		      /* Make sure value gets expanded once before the
5172                         loop.  */
5173		      expand_expr (value, const0_rtx, VOIDmode, 0);
5174		      emit_queue ();
5175		    }
5176		  store_expr (lo_index, index_r, 0);
5177		  loop = expand_start_loop (0);
5178
5179		  /* Assign value to element index.  */
5180		  position
5181		    = convert (ssizetype,
5182			       fold (build (MINUS_EXPR, TREE_TYPE (index),
5183					    index, TYPE_MIN_VALUE (domain))));
5184		  position = size_binop (MULT_EXPR, position,
5185					 convert (ssizetype,
5186						  TYPE_SIZE_UNIT (elttype)));
5187
5188		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5189		  xtarget = offset_address (target, pos_rtx,
5190					    highest_pow2_factor (position));
5191		  xtarget = adjust_address (xtarget, mode, 0);
5192		  if (TREE_CODE (value) == CONSTRUCTOR)
5193		    store_constructor (value, xtarget, cleared,
5194				       bitsize / BITS_PER_UNIT);
5195		  else
5196		    store_expr (value, xtarget, 0);
5197
5198		  expand_exit_loop_if_false (loop,
5199					     build (LT_EXPR, integer_type_node,
5200						    index, hi_index));
5201
5202		  expand_increment (build (PREINCREMENT_EXPR,
5203					   TREE_TYPE (index),
5204					   index, integer_one_node), 0, 0);
5205		  expand_end_loop ();
5206		  emit_label (loop_end);
5207		}
5208	    }
5209	  else if ((index != 0 && ! host_integerp (index, 0))
5210		   || ! host_integerp (TYPE_SIZE (elttype), 1))
5211	    {
5212	      tree position;
5213
5214	      if (index == 0)
5215		index = ssize_int (1);
5216
5217	      if (minelt)
5218		index = convert (ssizetype,
5219				 fold (build (MINUS_EXPR, TREE_TYPE (index),
5220					      index, TYPE_MIN_VALUE (domain))));
5221
5222	      position = size_binop (MULT_EXPR, index,
5223				     convert (ssizetype,
5224					      TYPE_SIZE_UNIT (elttype)));
5225	      xtarget = offset_address (target,
5226					expand_expr (position, 0, VOIDmode, 0),
5227					highest_pow2_factor (position));
5228	      xtarget = adjust_address (xtarget, mode, 0);
5229	      store_expr (value, xtarget, 0);
5230	    }
5231	  else
5232	    {
5233	      if (index != 0)
5234		bitpos = ((tree_low_cst (index, 0) - minelt)
5235			  * tree_low_cst (TYPE_SIZE (elttype), 1));
5236	      else
5237		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5238
5239	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5240		  && TREE_CODE (type) == ARRAY_TYPE
5241		  && TYPE_NONALIASED_COMPONENT (type))
5242		{
5243		  target = copy_rtx (target);
5244		  MEM_KEEP_ALIAS_SET_P (target) = 1;
5245		}
5246
5247	      store_constructor_field (target, bitsize, bitpos, mode, value,
5248				       type, cleared, get_alias_set (elttype));
5249
5250	    }
5251	}
5252    }
5253
5254  /* Set constructor assignments.  */
5255  else if (TREE_CODE (type) == SET_TYPE)
5256    {
5257      tree elt = CONSTRUCTOR_ELTS (exp);
5258      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5259      tree domain = TYPE_DOMAIN (type);
5260      tree domain_min, domain_max, bitlength;
5261
5262      /* The default implementation strategy is to extract the constant
5263	 parts of the constructor, use that to initialize the target,
5264	 and then "or" in whatever non-constant ranges we need in addition.
5265
5266	 If a large set is all zero or all ones, it is
5267	 probably better to set it using memset (if available) or bzero.
5268	 Also, if a large set has just a single range, it may also be
5269	 better to first clear the set (using bzero/memset) and then
5270	 set the bits we want.  */
5271
5272      /* Check for all zeros.  */
5273      if (elt == NULL_TREE && size > 0)
5274	{
5275	  if (!cleared)
5276	    clear_storage (target, GEN_INT (size));
5277	  return;
5278	}
5279
5280      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5281      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5282      bitlength = size_binop (PLUS_EXPR,
5283			      size_diffop (domain_max, domain_min),
5284			      ssize_int (1));
5285
5286      nbits = tree_low_cst (bitlength, 1);
5287
5288      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5289	 are "complicated" (more than one range), initialize (the
5290	 constant parts) by copying from a constant.  */
5291      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5292	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5293	{
5294	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5295	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5296	  char *bit_buffer = (char *) alloca (nbits);
5297	  HOST_WIDE_INT word = 0;
5298	  unsigned int bit_pos = 0;
5299	  unsigned int ibit = 0;
5300	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
5301
5302	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
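	  /* Pack the constant bits into words and store each completed
	     word (or the final partial word) into the target.  */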
5303	  for (;;)
5304	    {
5305	      if (bit_buffer[ibit])
5306		{
5307		  if (BYTES_BIG_ENDIAN)
5308		    word |= (1 << (set_word_size - 1 - bit_pos));
5309		  else
5310		    word |= 1 << bit_pos;
5311		}
5312
5313	      bit_pos++;  ibit++;
5314	      if (bit_pos >= set_word_size || ibit == nbits)
5315		{
5316		  if (word != 0 || ! cleared)
5317		    {
5318		      rtx datum = GEN_INT (word);
5319		      rtx to_rtx;
5320
5321		      /* The assumption here is that it is safe to use
5322			 XEXP if the set is multi-word, but not if
5323			 it's single-word.  */
5324		      if (GET_CODE (target) == MEM)
5325			to_rtx = adjust_address (target, mode, offset);
5326		      else if (offset == 0)
5327			to_rtx = target;
5328		      else
5329			abort ();
5330		      emit_move_insn (to_rtx, datum);
5331		    }
5332
5333		  if (ibit == nbits)
5334		    break;
5335		  word = 0;
5336		  bit_pos = 0;
5337		  offset += set_word_size / BITS_PER_UNIT;
5338		}
5339	    }
5340	}
5341      else if (!cleared)
5342	/* Don't bother clearing storage if the set is all ones.  */
5343	if (TREE_CHAIN (elt) != NULL_TREE
5344	    || (TREE_PURPOSE (elt) == NULL_TREE
5345		? nbits != 1
5346		: ( ! host_integerp (TREE_VALUE (elt), 0)
5347		   || ! host_integerp (TREE_PURPOSE (elt), 0)
5348		   || (tree_low_cst (TREE_VALUE (elt), 0)
5349		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5350		       != (HOST_WIDE_INT) nbits))))
5351	  clear_storage (target, expr_size (exp));
5352
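      /* Now set each remaining range of bits: constant byte-aligned ranges
	 can be filled with memset; anything else goes through the
	 __setbits library routine.  */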
5353      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5354	{
5355	  /* Start of range of element or NULL.  */
5356	  tree startbit = TREE_PURPOSE (elt);
5357	  /* End of range of element, or element value.  */
5358	  tree endbit   = TREE_VALUE (elt);
5359	  HOST_WIDE_INT startb, endb;
5360	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5361
5362	  bitlength_rtx = expand_expr (bitlength,
5363				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5364
5365	  /* Handle a non-range tuple element like [ expr ].  */
5366	  if (startbit == NULL_TREE)
5367	    {
5368	      startbit = save_expr (endbit);
5369	      endbit = startbit;
5370	    }
5371
5372	  startbit = convert (sizetype, startbit);
5373	  endbit = convert (sizetype, endbit);
5374	  if (! integer_zerop (domain_min))
5375	    {
5376	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5377	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5378	    }
5379	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5380				      EXPAND_CONST_ADDRESS);
5381	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5382				    EXPAND_CONST_ADDRESS);
5383
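	  /* The library calls below need a memory operand, so if the target
	     is a register, work in a stack temporary and copy the result
	     back to the register afterwards.  */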
5384	  if (REG_P (target))
5385	    {
5386	      targetx
5387		= assign_temp
5388		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5389					  (GET_MODE (target), 0),
5390					  TYPE_QUAL_CONST)),
5391		   0, 1, 1);
5392	      emit_move_insn (targetx, target);
5393	    }
5394
5395	  else if (GET_CODE (target) == MEM)
5396	    targetx = target;
5397	  else
5398	    abort ();
5399
5400	  /* Optimization:  If startbit and endbit are constants divisible
5401	     by BITS_PER_UNIT, call memset instead.  */
5402	  if (TARGET_MEM_FUNCTIONS
5403	      && TREE_CODE (startbit) == INTEGER_CST
5404	      && TREE_CODE (endbit) == INTEGER_CST
5405	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5406	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5407	    {
5408	      emit_library_call (memset_libfunc, LCT_NORMAL,
5409				 VOIDmode, 3,
5410				 plus_constant (XEXP (targetx, 0),
5411						startb / BITS_PER_UNIT),
5412				 Pmode,
5413				 constm1_rtx, TYPE_MODE (integer_type_node),
5414				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5415				 TYPE_MODE (sizetype));
5416	    }
5417	  else
5418	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5419			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5420			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5421			       startbit_rtx, TYPE_MODE (sizetype),
5422			       endbit_rtx, TYPE_MODE (sizetype));
5423
5424	  if (REG_P (target))
5425	    emit_move_insn (target, targetx);
5426	}
5427    }
5428
5429  else
5430    abort ();
5431}
5432
5433/* Store the value of EXP (an expression tree)
5434   into a subfield of TARGET which has mode MODE and occupies
5435   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5436   If MODE is VOIDmode, it means that we are storing into a bit-field.
5437
5438   If VALUE_MODE is VOIDmode, return nothing in particular.
5439   UNSIGNEDP is not used in this case.
5440
5441   Otherwise, return an rtx for the value stored.  This rtx
5442   has mode VALUE_MODE if that is convenient to do.
5443   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5444
5445   TYPE is the type of the underlying object.
5446
5447   ALIAS_SET is the alias set for the destination.  This value will
5448   (in general) be different from that for TARGET, since TARGET is a
5449   reference to the containing structure.  */
5450
5451static rtx
5452store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5453	     alias_set)
5454     rtx target;
5455     HOST_WIDE_INT bitsize;
5456     HOST_WIDE_INT bitpos;
5457     enum machine_mode mode;
5458     tree exp;
5459     enum machine_mode value_mode;
5460     int unsignedp;
5461     tree type;
5462     int alias_set;
5463{
5464  HOST_WIDE_INT width_mask = 0;
5465
5466  if (TREE_CODE (exp) == ERROR_MARK)
5467    return const0_rtx;
5468
5469  /* If we have nothing to store, do nothing unless the expression has
5470     side-effects.  */
5471  if (bitsize == 0)
5472    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5473  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5474    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
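  /* WIDTH_MASK now has the low BITSIZE bits set whenever the field is
     narrower than a word; it is used near the end of this function to
     return the stored value without refetching the bit-field.  */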
5475
5476  /* If we are storing into an unaligned field of an aligned union that is
5477     in a register, we may have the mode of TARGET being an integer mode but
5478     MODE == BLKmode.  In that case, get an aligned object whose size and
5479     alignment are the same as TARGET and store TARGET into it (we can avoid
5480     the store if the field being stored is the entire width of TARGET).  Then
5481     call ourselves recursively to store the field into a BLKmode version of
5482     that object.  Finally, load from the object into TARGET.  This is not
5483     very efficient in general, but should only be slightly more expensive
5484     than the otherwise-required unaligned accesses.  Perhaps this can be
5485     cleaned up later.  */
5486
5487  if (mode == BLKmode
5488      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5489    {
5490      rtx object
5491	= assign_temp
5492	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5493	   0, 1, 1);
5494      rtx blk_object = adjust_address (object, BLKmode, 0);
5495
5496      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5497	emit_move_insn (object, target);
5498
5499      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5500		   alias_set);
5501
5502      emit_move_insn (target, object);
5503
5504      /* We want to return the BLKmode version of the data.  */
5505      return blk_object;
5506    }
5507
5508  if (GET_CODE (target) == CONCAT)
5509    {
5510      /* We're storing into a struct containing a single __complex.  */
5511
5512      if (bitpos != 0)
5513	abort ();
5514      return store_expr (exp, target, 0);
5515    }
5516
5517  /* If the structure is in a register or if the component
5518     is a bit field, we cannot use addressing to access it.
5519     Use bit-field techniques or SUBREG to store in it.  */
5520
5521  if (mode == VOIDmode
5522      || (mode != BLKmode && ! direct_store[(int) mode]
5523	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5524	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5525      || GET_CODE (target) == REG
5526      || GET_CODE (target) == SUBREG
5527      /* If the field isn't aligned enough to store as an ordinary memref,
5528	 store it as a bit field.  */
5529      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5530	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5531	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5532      /* If the RHS and field are a constant size and the size of the
5533	 RHS isn't the same size as the bitfield, we must use bitfield
5534	 operations.  */
5535      || (bitsize >= 0
5536	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5537	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5538    {
5539      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5540
5541      /* If BITSIZE is narrower than the size of the type of EXP
5542	 we will be narrowing TEMP.  Normally, what's wanted are the
5543	 low-order bits.  However, if EXP's type is a record and this is a
5544	 big-endian machine, we want the upper BITSIZE bits.  */
5545      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5546	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5547	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5548	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5549			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5550				       - bitsize),
5551			     temp, 1);
5552
5553      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5554	 MODE.  */
5555      if (mode != VOIDmode && mode != BLKmode
5556	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5557	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5558
5559      /* If the modes of TARGET and TEMP are both BLKmode, both
5560	 must be in memory and BITPOS must be aligned on a byte
5561	 boundary.  If so, we simply do a block copy.  */
5562      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5563	{
5564	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5565	      || bitpos % BITS_PER_UNIT != 0)
5566	    abort ();
5567
5568	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5569	  emit_block_move (target, temp,
5570			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5571				    / BITS_PER_UNIT),
5572			   BLOCK_OP_NORMAL);
5573
5574	  return value_mode == VOIDmode ? const0_rtx : target;
5575	}
5576
5577      /* Store the value in the bitfield.  */
5578      store_bit_field (target, bitsize, bitpos, mode, temp,
5579		       int_size_in_bytes (type));
5580
5581      if (value_mode != VOIDmode)
5582	{
5583	  /* The caller wants an rtx for the value.
5584	     If possible, avoid refetching from the bitfield itself.  */
5585	  if (width_mask != 0
5586	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5587	    {
5588	      tree count;
5589	      enum machine_mode tmode;
5590
5591	      tmode = GET_MODE (temp);
5592	      if (tmode == VOIDmode)
5593		tmode = value_mode;
5594
5595	      if (unsignedp)
5596		return expand_and (tmode, temp,
5597				   gen_int_mode (width_mask, tmode),
5598				   NULL_RTX);
5599
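	      /* Otherwise sign-extend the value: shift the field up to the
		 top of TMODE and arithmetically shift it back down.  */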
5600	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5601	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5602	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5603	    }
5604
5605	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5606				    NULL_RTX, value_mode, VOIDmode,
5607				    int_size_in_bytes (type));
5608	}
5609      return const0_rtx;
5610    }
5611  else
5612    {
5613      rtx addr = XEXP (target, 0);
5614      rtx to_rtx = target;
5615
5616      /* If a value is wanted, it must be the lhs,
5617	 so make the address stable for multiple uses.  */
5618
5619      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5620	  && ! CONSTANT_ADDRESS_P (addr)
5621	  /* A frame-pointer reference is already stable.  */
5622	  && ! (GET_CODE (addr) == PLUS
5623		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5624		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5625		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5626	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5627
5628      /* Now build a reference to just the desired component.  */
5629
5630      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5631
5632      if (to_rtx == target)
5633	to_rtx = copy_rtx (to_rtx);
5634
5635      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5636      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5637	set_mem_alias_set (to_rtx, alias_set);
5638
5639      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5640    }
5641}
5642
5643/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5644   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5645   codes and find the ultimate containing object, which we return.
5646
5647   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5648   bit position, and *PUNSIGNEDP to the signedness of the field.
5649   If the position of the field is variable, we store a tree
5650   giving the variable offset (in units) in *POFFSET.
5651   This offset is in addition to the bit position.
5652   If the position is not variable, we store 0 in *POFFSET.
5653
5654   If any of the extraction expressions is volatile,
5655   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5656
5657   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5658   is a mode that can be used to access the field.  In that case, *PBITSIZE
5659   is redundant.
5660
5661   If the field describes a variable-sized object, *PMODE is set to
5662   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5663   this case, but the address of the object can be found.  */
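/* For example, for a reference such as A.B[I].C this returns the tree for A;
   the displacement of C within A is split between *PBITPOS (the constant
   bit offset) and *POFFSET (a tree involving the variable index I).  */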
5664
5665tree
5666get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5667		     punsignedp, pvolatilep)
5668     tree exp;
5669     HOST_WIDE_INT *pbitsize;
5670     HOST_WIDE_INT *pbitpos;
5671     tree *poffset;
5672     enum machine_mode *pmode;
5673     int *punsignedp;
5674     int *pvolatilep;
5675{
5676  tree size_tree = 0;
5677  enum machine_mode mode = VOIDmode;
5678  tree offset = size_zero_node;
5679  tree bit_offset = bitsize_zero_node;
5680  tree placeholder_ptr = 0;
5681  tree tem;
5682
5683  /* First get the mode, signedness, and size.  We do this from just the
5684     outermost expression.  */
5685  if (TREE_CODE (exp) == COMPONENT_REF)
5686    {
5687      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5688      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5689	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5690
5691      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5692    }
5693  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5694    {
5695      size_tree = TREE_OPERAND (exp, 1);
5696      *punsignedp = TREE_UNSIGNED (exp);
5697    }
5698  else
5699    {
5700      mode = TYPE_MODE (TREE_TYPE (exp));
5701      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5702
5703      if (mode == BLKmode)
5704	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5705      else
5706	*pbitsize = GET_MODE_BITSIZE (mode);
5707    }
5708
5709  if (size_tree != 0)
5710    {
5711      if (! host_integerp (size_tree, 1))
5712	mode = BLKmode, *pbitsize = -1;
5713      else
5714	*pbitsize = tree_low_cst (size_tree, 1);
5715    }
5716
5717  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5718     and find the ultimate containing object.  */
5719  while (1)
5720    {
5721      if (TREE_CODE (exp) == BIT_FIELD_REF)
5722	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5723      else if (TREE_CODE (exp) == COMPONENT_REF)
5724	{
5725	  tree field = TREE_OPERAND (exp, 1);
5726	  tree this_offset = DECL_FIELD_OFFSET (field);
5727
5728	  /* If this field hasn't been filled in yet, don't go
5729	     past it.  This should only happen when folding expressions
5730	     made during type construction.  */
5731	  if (this_offset == 0)
5732	    break;
5733	  else if (! TREE_CONSTANT (this_offset)
5734		   && contains_placeholder_p (this_offset))
5735	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5736
5737	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5738	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5739				   DECL_FIELD_BIT_OFFSET (field));
5740
5741	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5742	}
5743
5744      else if (TREE_CODE (exp) == ARRAY_REF
5745	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5746	{
5747	  tree index = TREE_OPERAND (exp, 1);
5748	  tree array = TREE_OPERAND (exp, 0);
5749	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5750	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5751	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5752
5753	  /* We assume all arrays have sizes that are a multiple of a byte.
5754	     First subtract the lower bound, if any, in the type of the
5755	     index, then convert to sizetype and multiply by the size of the
5756	     array element.  */
5757	  if (low_bound != 0 && ! integer_zerop (low_bound))
5758	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5759				 index, low_bound));
5760
5761	  /* If the index has a self-referential type, pass it to a
5762	     WITH_RECORD_EXPR; if the component size does, pass our
5763	     component to one.  */
5764	  if (! TREE_CONSTANT (index)
5765	      && contains_placeholder_p (index))
5766	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5767	  if (! TREE_CONSTANT (unit_size)
5768	      && contains_placeholder_p (unit_size))
5769	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5770
5771	  offset = size_binop (PLUS_EXPR, offset,
5772			       size_binop (MULT_EXPR,
5773					   convert (sizetype, index),
5774					   unit_size));
5775	}
5776
5777      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5778	{
5779	  tree new = find_placeholder (exp, &placeholder_ptr);
5780
5781	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5782	     We might have been called from tree optimization where we
5783	     haven't set up an object yet.  */
5784	  if (new == 0)
5785	    break;
5786	  else
5787	    exp = new;
5788
5789	  continue;
5790	}
5791      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5792	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5793	       && ! ((TREE_CODE (exp) == NOP_EXPR
5794		      || TREE_CODE (exp) == CONVERT_EXPR)
5795		     && (TYPE_MODE (TREE_TYPE (exp))
5796			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5797	break;
5798
5799      /* If any reference in the chain is volatile, the effect is volatile.  */
5800      if (TREE_THIS_VOLATILE (exp))
5801	*pvolatilep = 1;
5802
5803      exp = TREE_OPERAND (exp, 0);
5804    }
5805
5806  /* If OFFSET is constant, see if we can return the whole thing as a
5807     constant bit position.  Otherwise, split it up.  */
5808  if (host_integerp (offset, 0)
5809      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5810				 bitsize_unit_node))
5811      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5812      && host_integerp (tem, 0))
5813    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5814  else
5815    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5816
5817  *pmode = mode;
5818  return exp;
5819}
5820
5821/* Return 1 if T is an expression that get_inner_reference handles.  */
5822
5823int
5824handled_component_p (t)
5825     tree t;
5826{
5827  switch (TREE_CODE (t))
5828    {
5829    case BIT_FIELD_REF:
5830    case COMPONENT_REF:
5831    case ARRAY_REF:
5832    case ARRAY_RANGE_REF:
5833    case NON_LVALUE_EXPR:
5834    case VIEW_CONVERT_EXPR:
5835      return 1;
5836
5837    case NOP_EXPR:
5838    case CONVERT_EXPR:
5839      return (TYPE_MODE (TREE_TYPE (t))
5840	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5841
5842    default:
5843      return 0;
5844    }
5845}
5846
5847/* Given an rtx VALUE that may contain additions and multiplications, return
5848   an equivalent value that just refers to a register, memory, or constant.
5849   This is done by generating instructions to perform the arithmetic and
5850   returning a pseudo-register containing the value.
5851
5852   The returned value may be a REG, SUBREG, MEM or constant.  */
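/* For example, given (plus (mult (reg X) (const_int 4)) (reg Y)), insns
   computing the product and the sum are emitted and the result (normally
   a pseudo register) is returned.  */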
5853
5854rtx
5855force_operand (value, target)
5856     rtx value, target;
5857{
5858  rtx op1, op2;
5859  /* Use subtarget as the target for operand 0 of a binary operation.  */
5860  rtx subtarget = get_subtarget (target);
5861  enum rtx_code code = GET_CODE (value);
5862
5863  /* Check for a PIC address load.  */
5864  if ((code == PLUS || code == MINUS)
5865      && XEXP (value, 0) == pic_offset_table_rtx
5866      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5867	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5868	  || GET_CODE (XEXP (value, 1)) == CONST))
5869    {
5870      if (!subtarget)
5871	subtarget = gen_reg_rtx (GET_MODE (value));
5872      emit_move_insn (subtarget, value);
5873      return subtarget;
5874    }
5875
5876  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5877    {
5878      if (!target)
5879	target = gen_reg_rtx (GET_MODE (value));
5880      convert_move (target, force_operand (XEXP (value, 0), NULL),
5881		    code == ZERO_EXTEND);
5882      return target;
5883    }
5884
5885  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5886    {
5887      op2 = XEXP (value, 1);
5888      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5889	subtarget = 0;
5890      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5891	{
5892	  code = PLUS;
5893	  op2 = negate_rtx (GET_MODE (value), op2);
5894	}
5895
5896      /* Check for an addition with OP2 a constant integer and our first
5897         operand a PLUS of a virtual register and something else.  In that
5898         case, we want to emit the sum of the virtual register and the
5899         constant first and then add the other value.  This allows virtual
5900         register instantiation to simply modify the constant rather than
5901         creating another one around this addition.  */
5902      if (code == PLUS && GET_CODE (op2) == CONST_INT
5903	  && GET_CODE (XEXP (value, 0)) == PLUS
5904	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5905	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5906	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5907	{
5908	  rtx temp = expand_simple_binop (GET_MODE (value), code,
5909					  XEXP (XEXP (value, 0), 0), op2,
5910					  subtarget, 0, OPTAB_LIB_WIDEN);
5911	  return expand_simple_binop (GET_MODE (value), code, temp,
5912				      force_operand (XEXP (XEXP (value,
5913								 0), 1), 0),
5914				      target, 0, OPTAB_LIB_WIDEN);
5915	}
5916
5917      op1 = force_operand (XEXP (value, 0), subtarget);
5918      op2 = force_operand (op2, NULL_RTX);
5919      switch (code)
5920	{
5921	case MULT:
5922	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
5923	case DIV:
5924	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
5925	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
5926					target, 1, OPTAB_LIB_WIDEN);
5927	  else
5928	    return expand_divmod (0,
5929				  FLOAT_MODE_P (GET_MODE (value))
5930				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
5931				  GET_MODE (value), op1, op2, target, 0);
5932	  break;
5933	case MOD:
5934	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5935				target, 0);
5936	  break;
5937	case UDIV:
5938	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5939				target, 1);
5940	  break;
5941	case UMOD:
5942	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5943				target, 1);
5944	  break;
5945	case ASHIFTRT:
5946	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5947				      target, 0, OPTAB_LIB_WIDEN);
5948	  break;
5949	default:
5950	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5951				      target, 1, OPTAB_LIB_WIDEN);
5952	}
5953    }
5954  if (GET_RTX_CLASS (code) == '1')
5955    {
5956      op1 = force_operand (XEXP (value, 0), NULL_RTX);
5957      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5958    }
5959
5960#ifdef INSN_SCHEDULING
5961  /* On machines that have insn scheduling, we want all memory references to be
5962     explicit, so we need to deal with such paradoxical SUBREGs.  */
5963  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5964      && (GET_MODE_SIZE (GET_MODE (value))
5965	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5966    value
5967      = simplify_gen_subreg (GET_MODE (value),
5968			     force_reg (GET_MODE (SUBREG_REG (value)),
5969					force_operand (SUBREG_REG (value),
5970						       NULL_RTX)),
5971			     GET_MODE (SUBREG_REG (value)),
5972			     SUBREG_BYTE (value));
5973#endif
5974
5975  return value;
5976}
5977
5978/* Subroutine of expand_expr: return nonzero iff there is no way that
5979   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5980   call is going to be used to determine whether we need a temporary
5981   for EXP, as opposed to a recursive call to this function.
5982
5983   It is always safe for this routine to return zero since it merely
5984   searches for optimization opportunities.  */
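/* For example, a CALL_EXPR is never considered safe from a hard register
   or from memory, since the call may clobber either.  */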
5985
5986int
5987safe_from_p (x, exp, top_p)
5988     rtx x;
5989     tree exp;
5990     int top_p;
5991{
5992  rtx exp_rtl = 0;
5993  int i, nops;
5994  static tree save_expr_list;
5995
5996  if (x == 0
5997      /* If EXP has varying size, we MUST use a target since we currently
5998	 have no way of allocating temporaries of variable size
5999	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6000	 So we assume here that something at a higher level has prevented a
6001	 clash.  This is somewhat bogus, but the best we can do.  Only
6002	 do this when X is BLKmode and when we are at the top level.  */
6003      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6004	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6005	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6006	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6007	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6008	      != INTEGER_CST)
6009	  && GET_MODE (x) == BLKmode)
6010      /* If X is in the outgoing argument area, it is always safe.  */
6011      || (GET_CODE (x) == MEM
6012	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
6013	      || (GET_CODE (XEXP (x, 0)) == PLUS
6014		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6015    return 1;
6016
6017  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6018     find the underlying pseudo.  */
6019  if (GET_CODE (x) == SUBREG)
6020    {
6021      x = SUBREG_REG (x);
6022      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6023	return 0;
6024    }
6025
6026  /* A SAVE_EXPR might appear many times in the expression passed to the
6027     top-level safe_from_p call, and if it has a complex subexpression,
6028     examining it multiple times could result in a combinatorial explosion.
6029     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6030     with optimization took about 28 minutes to compile -- even though it was
6031     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
6032     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
6033     we have processed.  Note that the only test of top_p was above.  */
6034
6035  if (top_p)
6036    {
6037      int rtn;
6038      tree t;
6039
6040      save_expr_list = 0;
6041
6042      rtn = safe_from_p (x, exp, 0);
6043
6044      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6045	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6046
6047      return rtn;
6048    }
6049
6050  /* Now look at our tree code and possibly recurse.  */
6051  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6052    {
6053    case 'd':
6054      exp_rtl = DECL_RTL_IF_SET (exp);
6055      break;
6056
6057    case 'c':
6058      return 1;
6059
6060    case 'x':
6061      if (TREE_CODE (exp) == TREE_LIST)
6062	{
6063	  while (1)
6064	    {
6065	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6066		return 0;
6067	      exp = TREE_CHAIN (exp);
6068	      if (!exp)
6069		return 1;
6070	      if (TREE_CODE (exp) != TREE_LIST)
6071		return safe_from_p (x, exp, 0);
6072	    }
6073	}
6074      else if (TREE_CODE (exp) == ERROR_MARK)
6075	return 1;	/* An already-visited SAVE_EXPR? */
6076      else
6077	return 0;
6078
6079    case '2':
6080    case '<':
6081      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6082	return 0;
6083      /* FALLTHRU */
6084
6085    case '1':
6086      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6087
6088    case 'e':
6089    case 'r':
6090      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6091	 the expression.  If it is set, we conflict iff we are that rtx or
6092	 both are in memory.  Otherwise, we check all operands of the
6093	 expression recursively.  */
6094
6095      switch (TREE_CODE (exp))
6096	{
6097	case ADDR_EXPR:
6098	  /* If the operand is static or we are static, we can't conflict.
6099	     Likewise if we don't conflict with the operand at all.  */
6100	  if (staticp (TREE_OPERAND (exp, 0))
6101	      || TREE_STATIC (exp)
6102	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6103	    return 1;
6104
6105	  /* Otherwise, the only way this can conflict is if we are taking
6106	     the address of a DECL whose address is part of X, which is
6107	     very rare.  */
6108	  exp = TREE_OPERAND (exp, 0);
6109	  if (DECL_P (exp))
6110	    {
6111	      if (!DECL_RTL_SET_P (exp)
6112		  || GET_CODE (DECL_RTL (exp)) != MEM)
6113		return 0;
6114	      else
6115		exp_rtl = XEXP (DECL_RTL (exp), 0);
6116	    }
6117	  break;
6118
6119	case INDIRECT_REF:
6120	  if (GET_CODE (x) == MEM
6121	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6122					get_alias_set (exp)))
6123	    return 0;
6124	  break;
6125
6126	case CALL_EXPR:
6127	  /* Assume that the call will clobber all hard registers and
6128	     all of memory.  */
6129	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6130	      || GET_CODE (x) == MEM)
6131	    return 0;
6132	  break;
6133
6134	case RTL_EXPR:
6135	  /* If a sequence exists, we would have to scan every instruction
6136	     in the sequence to see if it was safe.  This is probably not
6137	     worthwhile.  */
6138	  if (RTL_EXPR_SEQUENCE (exp))
6139	    return 0;
6140
6141	  exp_rtl = RTL_EXPR_RTL (exp);
6142	  break;
6143
6144	case WITH_CLEANUP_EXPR:
6145	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6146	  break;
6147
6148	case CLEANUP_POINT_EXPR:
6149	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6150
6151	case SAVE_EXPR:
6152	  exp_rtl = SAVE_EXPR_RTL (exp);
6153	  if (exp_rtl)
6154	    break;
6155
6156	  /* If we've already scanned this, don't do it again.  Otherwise,
6157	     show we've scanned it and record for clearing the flag if we're
6158	     going on.  */
6159	  if (TREE_PRIVATE (exp))
6160	    return 1;
6161
6162	  TREE_PRIVATE (exp) = 1;
6163	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6164	    {
6165	      TREE_PRIVATE (exp) = 0;
6166	      return 0;
6167	    }
6168
6169	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6170	  return 1;
6171
6172	case BIND_EXPR:
6173	  /* The only operand we look at is operand 1.  The rest aren't
6174	     part of the expression.  */
6175	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6176
6177	case METHOD_CALL_EXPR:
6178	  /* This takes an rtx argument, but shouldn't appear here.  */
6179	  abort ();
6180
6181	default:
6182	  break;
6183	}
6184
6185      /* If we have an rtx, we do not need to scan our operands.  */
6186      if (exp_rtl)
6187	break;
6188
6189      nops = first_rtl_op (TREE_CODE (exp));
6190      for (i = 0; i < nops; i++)
6191	if (TREE_OPERAND (exp, i) != 0
6192	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6193	  return 0;
6194
6195      /* If this is a language-specific tree code, it may require
6196	 special handling.  */
6197      if ((unsigned int) TREE_CODE (exp)
6198	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6199	  && !(*lang_hooks.safe_from_p) (x, exp))
6200	return 0;
6201    }
6202
6203  /* If we have an rtl, find any enclosed object.  Then see if we conflict
6204     with it.  */
6205  if (exp_rtl)
6206    {
6207      if (GET_CODE (exp_rtl) == SUBREG)
6208	{
6209	  exp_rtl = SUBREG_REG (exp_rtl);
6210	  if (GET_CODE (exp_rtl) == REG
6211	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6212	    return 0;
6213	}
6214
6215      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6216	 are memory and they conflict.  */
6217      return ! (rtx_equal_p (x, exp_rtl)
6218		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6219		    && true_dependence (exp_rtl, VOIDmode, x,
6220					rtx_addr_varies_p)));
6221    }
6222
6223  /* If we reach here, it is safe.  */
6224  return 1;
6225}
6226
6227/* Subroutine of expand_expr: return rtx if EXP is a
6228   variable or parameter; else return 0.  */
6229
6230static rtx
6231var_rtx (exp)
6232     tree exp;
6233{
6234  STRIP_NOPS (exp);
6235  switch (TREE_CODE (exp))
6236    {
6237    case PARM_DECL:
6238    case VAR_DECL:
6239      return DECL_RTL (exp);
6240    default:
6241      return 0;
6242    }
6243}
6244
6245#ifdef MAX_INTEGER_COMPUTATION_MODE
6246
6247void
6248check_max_integer_computation_mode (exp)
6249     tree exp;
6250{
6251  enum tree_code code;
6252  enum machine_mode mode;
6253
6254  /* Strip any NOPs that don't change the mode.  */
6255  STRIP_NOPS (exp);
6256  code = TREE_CODE (exp);
6257
6258  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
6259  if (code == NOP_EXPR
6260      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6261    return;
6262
6263  /* First check the type of the overall operation.   We need only look at
6264     unary, binary and relational operations.  */
6265  if (TREE_CODE_CLASS (code) == '1'
6266      || TREE_CODE_CLASS (code) == '2'
6267      || TREE_CODE_CLASS (code) == '<')
6268    {
6269      mode = TYPE_MODE (TREE_TYPE (exp));
6270      if (GET_MODE_CLASS (mode) == MODE_INT
6271	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6272	internal_error ("unsupported wide integer operation");
6273    }
6274
6275  /* Check operand of a unary op.  */
6276  if (TREE_CODE_CLASS (code) == '1')
6277    {
6278      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6279      if (GET_MODE_CLASS (mode) == MODE_INT
6280	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6281	internal_error ("unsupported wide integer operation");
6282    }
6283
6284  /* Check operands of a binary/comparison op.  */
6285  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6286    {
6287      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6288      if (GET_MODE_CLASS (mode) == MODE_INT
6289	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6290	internal_error ("unsupported wide integer operation");
6291
6292      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6293      if (GET_MODE_CLASS (mode) == MODE_INT
6294	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6295	internal_error ("unsupported wide integer operation");
6296    }
6297}
6298#endif
6299
6300/* Return the highest power of two that EXP is known to be a multiple of.
6301   This is used in updating alignment of MEMs in array references.  */
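/* For example, for the expression (I * 8 + 32) the result is 8, and for
   the plain constant 48 it is 16.  */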
6302
6303static HOST_WIDE_INT
6304highest_pow2_factor (exp)
6305     tree exp;
6306{
6307  HOST_WIDE_INT c0, c1;
6308
6309  switch (TREE_CODE (exp))
6310    {
6311    case INTEGER_CST:
6312      /* We can find the lowest bit that's a one.  If the low
6313	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6314	 We need to handle this case since we can find it in a COND_EXPR,
6315	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6316	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6317	 later ICE.  */
6318      if (TREE_CONSTANT_OVERFLOW (exp))
6319	return BIGGEST_ALIGNMENT;
6320      else
6321	{
6322	  /* Note: tree_low_cst is intentionally not used here,
6323	     we don't care about the upper bits.  */
6324	  c0 = TREE_INT_CST_LOW (exp);
6325	  c0 &= -c0;
6326	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6327	}
6328      break;
6329
6330    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6331      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6332      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6333      return MIN (c0, c1);
6334
6335    case MULT_EXPR:
6336      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6337      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6338      return c0 * c1;
6339
6340    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6341    case CEIL_DIV_EXPR:
6342      if (integer_pow2p (TREE_OPERAND (exp, 1))
6343	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6344	{
6345	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6346	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6347	  return MAX (1, c0 / c1);
6348	}
6349      break;
6350
6351    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6352    case SAVE_EXPR: case WITH_RECORD_EXPR:
6353      return highest_pow2_factor (TREE_OPERAND (exp, 0));
6354
6355    case COMPOUND_EXPR:
6356      return highest_pow2_factor (TREE_OPERAND (exp, 1));
6357
6358    case COND_EXPR:
6359      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6360      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6361      return MIN (c0, c1);
6362
6363    default:
6364      break;
6365    }
6366
6367  return 1;
6368}
6369
6370/* Similar, except that it is known that the expression must be a multiple
6371   of the alignment of TYPE.  */
6372
6373static HOST_WIDE_INT
6374highest_pow2_factor_for_type (type, exp)
6375     tree type;
6376     tree exp;
6377{
6378  HOST_WIDE_INT type_align, factor;
6379
6380  factor = highest_pow2_factor (exp);
6381  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6382  return MAX (factor, type_align);
6383}
6384
6385/* Return an object on the placeholder list that matches EXP, a
6386   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
6387   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
6388   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
6389   is a location which initially points to a starting location in the
6390   placeholder list (zero means start of the list) and where a pointer into
6391   the placeholder list at which the object is found is placed.  */
6392
6393tree
6394find_placeholder (exp, plist)
6395     tree exp;
6396     tree *plist;
6397{
6398  tree type = TREE_TYPE (exp);
6399  tree placeholder_expr;
6400
6401  for (placeholder_expr
6402       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6403       placeholder_expr != 0;
6404       placeholder_expr = TREE_CHAIN (placeholder_expr))
6405    {
6406      tree need_type = TYPE_MAIN_VARIANT (type);
6407      tree elt;
6408
6409      /* Find the outermost reference that is of the type we want.  If none,
6410	 see if any object has a type that is a pointer to the type we
6411	 want.  */
6412      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6413	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6414		   || TREE_CODE (elt) == COND_EXPR)
6415		  ? TREE_OPERAND (elt, 1)
6416		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6417		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6418		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6419		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6420		  ? TREE_OPERAND (elt, 0) : 0))
6421	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6422	  {
6423	    if (plist)
6424	      *plist = placeholder_expr;
6425	    return elt;
6426	  }
6427
6428      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6429	   elt
6430	   = ((TREE_CODE (elt) == COMPOUND_EXPR
6431	       || TREE_CODE (elt) == COND_EXPR)
6432	      ? TREE_OPERAND (elt, 1)
6433	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6434		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6435		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6436		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6437	      ? TREE_OPERAND (elt, 0) : 0))
6438	if (POINTER_TYPE_P (TREE_TYPE (elt))
6439	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6440		== need_type))
6441	  {
6442	    if (plist)
6443	      *plist = placeholder_expr;
6444	    return build1 (INDIRECT_REF, need_type, elt);
6445	  }
6446    }
6447
6448  return 0;
6449}
6450
6451/* expand_expr: generate code for computing expression EXP.
6452   An rtx for the computed value is returned.  The value is never null.
6453   In the case of a void EXP, const0_rtx is returned.
6454
6455   The value may be stored in TARGET if TARGET is nonzero.
6456   TARGET is just a suggestion; callers must assume that
6457   the rtx returned may not be the same as TARGET.
6458
6459   If TARGET is CONST0_RTX, it means that the value will be ignored.
6460
6461   If TMODE is not VOIDmode, it suggests generating the
6462   result in mode TMODE.  But this is done only when convenient.
6463   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6464   TMODE is just a suggestion; callers must assume that
6465   the rtx returned may not have mode TMODE.
6466
6467   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6468   probably will not be used.
6469
6470   If MODIFIER is EXPAND_SUM then when EXP is an addition
6471   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6472   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6473   products as above, or REG or MEM, or constant.
6474   Ordinarily in such cases we would output mul or add instructions
6475   and then return a pseudo reg containing the sum.
6476
6477   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6478   it also marks a label as absolutely required (it can't be dead).
6479   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6480   This is used for outputting expressions used in initializers.
6481
6482   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6483   with a constant address even if that address is not normally legitimate.
6484   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6485
6486   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6487   a call parameter.  Such targets require special care as we haven't yet
6488   marked TARGET so that it's safe from being trashed by libcalls.  We
6489   don't want to use TARGET for anything but the final result;
6490   intermediate values must go elsewhere.  Additionally, calls to
6491   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
6492
6493rtx
6494expand_expr (exp, target, tmode, modifier)
6495     tree exp;
6496     rtx target;
6497     enum machine_mode tmode;
6498     enum expand_modifier modifier;
6499{
6500  rtx op0, op1, temp;
6501  tree type = TREE_TYPE (exp);
6502  int unsignedp = TREE_UNSIGNED (type);
6503  enum machine_mode mode;
6504  enum tree_code code = TREE_CODE (exp);
6505  optab this_optab;
6506  rtx subtarget, original_target;
6507  int ignore;
6508  tree context;
6509
6510  /* Handle ERROR_MARK before anybody tries to access its type.  */
6511  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6512    {
6513      op0 = CONST0_RTX (tmode);
6514      if (op0 != 0)
6515	return op0;
6516      return const0_rtx;
6517    }
6518
6519  mode = TYPE_MODE (type);
6520  /* Use subtarget as the target for operand 0 of a binary operation.  */
6521  subtarget = get_subtarget (target);
6522  original_target = target;
6523  ignore = (target == const0_rtx
6524	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6525		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6526		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6527		&& TREE_CODE (type) == VOID_TYPE));
6528
6529  /* If we are going to ignore this result, we need only do something
6530     if there is a side-effect somewhere in the expression.  If there
6531     is, short-circuit the most common cases here.  Note that we must
6532     not call expand_expr with anything but const0_rtx in case this
6533     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6534
6535  if (ignore)
6536    {
6537      if (! TREE_SIDE_EFFECTS (exp))
6538	return const0_rtx;
6539
6540      /* Ensure we reference a volatile object even if value is ignored, but
6541	 don't do this if all we are doing is taking its address.  */
6542      if (TREE_THIS_VOLATILE (exp)
6543	  && TREE_CODE (exp) != FUNCTION_DECL
6544	  && mode != VOIDmode && mode != BLKmode
6545	  && modifier != EXPAND_CONST_ADDRESS)
6546	{
6547	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6548	  if (GET_CODE (temp) == MEM)
6549	    temp = copy_to_reg (temp);
6550	  return const0_rtx;
6551	}
6552
6553      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6554	  || code == INDIRECT_REF || code == BUFFER_REF)
6555	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6556			    modifier);
6557
6558      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6559	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6560	{
6561	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6562	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6563	  return const0_rtx;
6564	}
6565      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6566	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6567	/* If the second operand has no side effects, just evaluate
6568	   the first.  */
6569	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6570			    modifier);
6571      else if (code == BIT_FIELD_REF)
6572	{
6573	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6574	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6575	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6576	  return const0_rtx;
6577	}
6578
6579      target = 0;
6580    }
6581
6582#ifdef MAX_INTEGER_COMPUTATION_MODE
6583  /* Only check stuff here if the mode we want is different from the mode
6584     of the expression; if it's the same, check_max_integer_computation_mode
6585     will handle it.  Do we really need to check this stuff at all?  */
6586
6587  if (target
6588      && GET_MODE (target) != mode
6589      && TREE_CODE (exp) != INTEGER_CST
6590      && TREE_CODE (exp) != PARM_DECL
6591      && TREE_CODE (exp) != ARRAY_REF
6592      && TREE_CODE (exp) != ARRAY_RANGE_REF
6593      && TREE_CODE (exp) != COMPONENT_REF
6594      && TREE_CODE (exp) != BIT_FIELD_REF
6595      && TREE_CODE (exp) != INDIRECT_REF
6596      && TREE_CODE (exp) != CALL_EXPR
6597      && TREE_CODE (exp) != VAR_DECL
6598      && TREE_CODE (exp) != RTL_EXPR)
6599    {
6600      enum machine_mode mode = GET_MODE (target);
6601
6602      if (GET_MODE_CLASS (mode) == MODE_INT
6603	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6604	internal_error ("unsupported wide integer operation");
6605    }
6606
6607  if (tmode != mode
6608      && TREE_CODE (exp) != INTEGER_CST
6609      && TREE_CODE (exp) != PARM_DECL
6610      && TREE_CODE (exp) != ARRAY_REF
6611      && TREE_CODE (exp) != ARRAY_RANGE_REF
6612      && TREE_CODE (exp) != COMPONENT_REF
6613      && TREE_CODE (exp) != BIT_FIELD_REF
6614      && TREE_CODE (exp) != INDIRECT_REF
6615      && TREE_CODE (exp) != VAR_DECL
6616      && TREE_CODE (exp) != CALL_EXPR
6617      && TREE_CODE (exp) != RTL_EXPR
6618      && GET_MODE_CLASS (tmode) == MODE_INT
6619      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6620    internal_error ("unsupported wide integer operation");
6621
6622  check_max_integer_computation_mode (exp);
6623#endif
6624
6625  /* If will do cse, generate all results into pseudo registers
6626     since 1) that allows cse to find more things
6627     and 2) otherwise cse could produce an insn the machine
6628     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6629     MEM: that's much more likely to be most efficient into the MEM.  */
6630
6631  if (! cse_not_expected && mode != BLKmode && target
6632      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6633      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6634    target = 0;
6635
6636  switch (code)
6637    {
6638    case LABEL_DECL:
6639      {
6640	tree function = decl_function_context (exp);
6641	/* Handle using a label in a containing function.  */
6642	if (function != current_function_decl
6643	    && function != inline_function_decl && function != 0)
6644	  {
6645	    struct function *p = find_function_data (function);
6646	    p->expr->x_forced_labels
6647	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6648				   p->expr->x_forced_labels);
6649	  }
6650	else
6651	  {
6652	    if (modifier == EXPAND_INITIALIZER)
6653	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6654						 label_rtx (exp),
6655						 forced_labels);
6656	  }
6657
6658	temp = gen_rtx_MEM (FUNCTION_MODE,
6659			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6660	if (function != current_function_decl
6661	    && function != inline_function_decl && function != 0)
6662	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6663	return temp;
6664      }
6665
6666    case PARM_DECL:
6667      if (!DECL_RTL_SET_P (exp))
6668	{
6669	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6670	  return CONST0_RTX (mode);
6671	}
6672
6673      /* ... fall through ...  */
6674
6675    case VAR_DECL:
6676      /* If a static var's type was incomplete when the decl was written,
6677	 but the type is complete now, lay out the decl now.  */
6678      if (DECL_SIZE (exp) == 0
6679	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6680	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6681	layout_decl (exp, 0);
6682
6683      /* ... fall through ...  */
6684
6685    case FUNCTION_DECL:
6686    case RESULT_DECL:
6687      if (DECL_RTL (exp) == 0)
6688	abort ();
6689
6690      /* Ensure variable marked as used even if it doesn't go through
6691	 a parser.  If it hasn't been used yet, write out an external
6692	 definition.  */
6693      if (! TREE_USED (exp))
6694	{
6695	  assemble_external (exp);
6696	  TREE_USED (exp) = 1;
6697	}
6698
6699      /* Show we haven't gotten RTL for this yet.  */
6700      temp = 0;
6701
6702      /* Handle variables inherited from containing functions.  */
6703      context = decl_function_context (exp);
6704
6705      /* We treat inline_function_decl as an alias for the current function
6706	 because that is the inline function whose vars, types, etc.
6707	 are being merged into the current function.
6708	 See expand_inline_function.  */
6709
6710      if (context != 0 && context != current_function_decl
6711	  && context != inline_function_decl
6712	  /* If var is static, we don't need a static chain to access it.  */
6713	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6714		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6715	{
6716	  rtx addr;
6717
6718	  /* Mark as non-local and addressable.  */
6719	  DECL_NONLOCAL (exp) = 1;
6720	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6721	    abort ();
6722	  (*lang_hooks.mark_addressable) (exp);
6723	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6724	    abort ();
6725	  addr = XEXP (DECL_RTL (exp), 0);
6726	  if (GET_CODE (addr) == MEM)
6727	    addr
6728	      = replace_equiv_address (addr,
6729				       fix_lexical_addr (XEXP (addr, 0), exp));
6730	  else
6731	    addr = fix_lexical_addr (addr, exp);
6732
6733	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6734	}
6735
6736      /* This is the case of an array whose size is to be determined
6737	 from its initializer, while the initializer is still being parsed.
6738	 See expand_decl.  */
6739
6740      else if (GET_CODE (DECL_RTL (exp)) == MEM
6741	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6742	temp = validize_mem (DECL_RTL (exp));
6743
6744      /* If DECL_RTL is memory, we are in the normal case and either
6745	 the address is not valid or it is not a register and -fforce-addr
6746	 is specified, get the address into a register.  */
6747
6748      else if (GET_CODE (DECL_RTL (exp)) == MEM
6749	       && modifier != EXPAND_CONST_ADDRESS
6750	       && modifier != EXPAND_SUM
6751	       && modifier != EXPAND_INITIALIZER
6752	       && (! memory_address_p (DECL_MODE (exp),
6753				       XEXP (DECL_RTL (exp), 0))
6754		   || (flag_force_addr
6755		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6756	temp = replace_equiv_address (DECL_RTL (exp),
6757				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6758
6759      /* If we got something, return it.  But first, set the alignment
6760	 if the address is a register.  */
6761      if (temp != 0)
6762	{
6763	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6764	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6765
6766	  return temp;
6767	}
6768
6769      /* If the mode of DECL_RTL does not match that of the decl, it
6770	 must be a promoted value.  We return a SUBREG of the wanted mode,
6771	 but mark it so that we know that it was already extended.  */
6772
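      /* Illustrative note (added commentary, not from the original source):
	 on a target that promotes QImode variables into word-sized
	 registers, DECL_RTL (exp) might be (reg:SI 58) while DECL_MODE (exp)
	 is QImode.  The code below then returns something like
	 (subreg:QI (reg:SI 58) 0), with SUBREG_PROMOTED_VAR_P set so later
	 users know the high bits already hold a valid extension.  */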
6773      if (GET_CODE (DECL_RTL (exp)) == REG
6774	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6775	{
6776	  /* Get the signedness used for this variable.  Ensure we get the
6777	     same mode we got when the variable was declared.  */
6778	  if (GET_MODE (DECL_RTL (exp))
6779	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6780			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6781	    abort ();
6782
6783	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6784	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6785	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6786	  return temp;
6787	}
6788
6789      return DECL_RTL (exp);
6790
6791    case INTEGER_CST:
6792      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6793				 TREE_INT_CST_HIGH (exp), mode);
6794
6795      /* ??? If overflow is set, fold will have done an incomplete job,
6796	 which can result in (plus xx (const_int 0)), which can get
6797	 simplified by validate_replace_rtx during virtual register
6798	 instantiation, which can result in unrecognizable insns.
6799	 Avoid this by forcing all overflows into registers.  */
6800      if (TREE_CONSTANT_OVERFLOW (exp)
6801	  && modifier != EXPAND_INITIALIZER)
6802	temp = force_reg (mode, temp);
6803
6804      return temp;
6805
6806    case VECTOR_CST:
6807      return const_vector_from_tree (exp);
6808
6809    case CONST_DECL:
6810      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6811
6812    case REAL_CST:
6813      /* If optimized, generate immediate CONST_DOUBLE
6814	 which will be turned into memory by reload if necessary.
6815
6816	 We used to force a register so that loop.c could see it.  But
6817	 this does not allow gen_* patterns to perform optimizations with
6818	 the constants.  It also produces two insns in cases like "x = 1.0;".
6819	 On most machines, floating-point constants are not permitted in
6820	 many insns, so we'd end up copying it to a register in any case.
6821
6822	 Now, we do the copying in expand_binop, if appropriate.  */
6823      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6824					   TYPE_MODE (TREE_TYPE (exp)));
6825
6826    case COMPLEX_CST:
6827    case STRING_CST:
6828      if (! TREE_CST_RTL (exp))
6829	output_constant_def (exp, 1);
6830
6831      /* TREE_CST_RTL probably contains a constant address.
6832	 On RISC machines where a constant address isn't valid,
6833	 make some insns to get that address into a register.  */
6834      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6835	  && modifier != EXPAND_CONST_ADDRESS
6836	  && modifier != EXPAND_INITIALIZER
6837	  && modifier != EXPAND_SUM
6838	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6839	      || (flag_force_addr
6840		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6841	return replace_equiv_address (TREE_CST_RTL (exp),
6842				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6843      return TREE_CST_RTL (exp);
6844
6845    case EXPR_WITH_FILE_LOCATION:
6846      {
6847	rtx to_return;
6848	const char *saved_input_filename = input_filename;
6849	int saved_lineno = lineno;
6850	input_filename = EXPR_WFL_FILENAME (exp);
6851	lineno = EXPR_WFL_LINENO (exp);
6852	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6853	  emit_line_note (input_filename, lineno);
6854	/* Possibly avoid switching back and forth here.  */
6855	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6856	input_filename = saved_input_filename;
6857	lineno = saved_lineno;
6858	return to_return;
6859      }
6860
6861    case SAVE_EXPR:
6862      context = decl_function_context (exp);
6863
6864      /* If this SAVE_EXPR was at global context, assume we are an
6865	 initialization function and move it into our context.  */
6866      if (context == 0)
6867	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6868
6869      /* We treat inline_function_decl as an alias for the current function
6870	 because that is the inline function whose vars, types, etc.
6871	 are being merged into the current function.
6872	 See expand_inline_function.  */
6873      if (context == current_function_decl || context == inline_function_decl)
6874	context = 0;
6875
6876      /* If this is non-local, handle it.  */
6877      if (context)
6878	{
6879	  /* The following call just exists to abort if the context is
6880	     not of a containing function.  */
6881	  find_function_data (context);
6882
6883	  temp = SAVE_EXPR_RTL (exp);
6884	  if (temp && GET_CODE (temp) == REG)
6885	    {
6886	      put_var_into_stack (exp, /*rescan=*/true);
6887	      temp = SAVE_EXPR_RTL (exp);
6888	    }
6889	  if (temp == 0 || GET_CODE (temp) != MEM)
6890	    abort ();
6891	  return
6892	    replace_equiv_address (temp,
6893				   fix_lexical_addr (XEXP (temp, 0), exp));
6894	}
6895      if (SAVE_EXPR_RTL (exp) == 0)
6896	{
6897	  if (mode == VOIDmode)
6898	    temp = const0_rtx;
6899	  else
6900	    temp = assign_temp (build_qualified_type (type,
6901						      (TYPE_QUALS (type)
6902						       | TYPE_QUAL_CONST)),
6903				3, 0, 0);
6904
6905	  SAVE_EXPR_RTL (exp) = temp;
6906	  if (!optimize && GET_CODE (temp) == REG)
6907	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6908						save_expr_regs);
6909
6910	  /* If the mode of TEMP does not match that of the expression, it
6911	     must be a promoted value.  We pass store_expr a SUBREG of the
6912	     wanted mode but mark it so that we know that it was already
6913	     extended.  */
6914
6915	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6916	    {
6917	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6918	      promote_mode (type, mode, &unsignedp, 0);
6919	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6920	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6921	    }
6922
6923	  if (temp == const0_rtx)
6924	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6925	  else
6926	    store_expr (TREE_OPERAND (exp, 0), temp,
6927			modifier == EXPAND_STACK_PARM ? 2 : 0);
6928
6929	  TREE_USED (exp) = 1;
6930	}
6931
6932      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6933	 must be a promoted value.  We return a SUBREG of the wanted mode,
6934	 but mark it so that we know that it was already extended.  */
6935
6936      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6937	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6938	{
6939	  /* Compute the signedness and make the proper SUBREG.  */
6940	  promote_mode (type, mode, &unsignedp, 0);
6941	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6942	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6943	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6944	  return temp;
6945	}
6946
6947      return SAVE_EXPR_RTL (exp);
6948
6949    case UNSAVE_EXPR:
6950      {
6951	rtx temp;
6952	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6953	TREE_OPERAND (exp, 0)
6954	  = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6955	return temp;
6956      }
6957
6958    case PLACEHOLDER_EXPR:
6959      {
6960	tree old_list = placeholder_list;
6961	tree placeholder_expr = 0;
6962
6963	exp = find_placeholder (exp, &placeholder_expr);
6964	if (exp == 0)
6965	  abort ();
6966
6967	placeholder_list = TREE_CHAIN (placeholder_expr);
6968	temp = expand_expr (exp, original_target, tmode, modifier);
6969	placeholder_list = old_list;
6970	return temp;
6971      }
6972
6973    case WITH_RECORD_EXPR:
6974      /* Put the object on the placeholder list, expand our first operand,
6975	 and pop the list.  */
6976      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6977				    placeholder_list);
6978      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6979			    modifier);
6980      placeholder_list = TREE_CHAIN (placeholder_list);
6981      return target;
6982
6983    case GOTO_EXPR:
6984      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6985	expand_goto (TREE_OPERAND (exp, 0));
6986      else
6987	expand_computed_goto (TREE_OPERAND (exp, 0));
6988      return const0_rtx;
6989
6990    case EXIT_EXPR:
6991      expand_exit_loop_if_false (NULL,
6992				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6993      return const0_rtx;
6994
6995    case LABELED_BLOCK_EXPR:
6996      if (LABELED_BLOCK_BODY (exp))
6997	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6998      /* Should perhaps use expand_label, but this is simpler and safer.  */
6999      do_pending_stack_adjust ();
7000      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7001      return const0_rtx;
7002
7003    case EXIT_BLOCK_EXPR:
7004      if (EXIT_BLOCK_RETURN (exp))
7005	sorry ("returned value in block_exit_expr");
7006      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7007      return const0_rtx;
7008
7009    case LOOP_EXPR:
7010      push_temp_slots ();
7011      expand_start_loop (1);
7012      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7013      expand_end_loop ();
7014      pop_temp_slots ();
7015
7016      return const0_rtx;
7017
7018    case BIND_EXPR:
7019      {
7020	tree vars = TREE_OPERAND (exp, 0);
7021	int vars_need_expansion = 0;
7022
7023	/* Need to open a binding contour here because
7024	   if there are any cleanups they must be contained here.  */
7025	expand_start_bindings (2);
7026
7027	/* Mark the corresponding BLOCK for output in its proper place.  */
7028	if (TREE_OPERAND (exp, 2) != 0
7029	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
7030	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7031
7032	/* If VARS have not yet been expanded, expand them now.  */
7033	while (vars)
7034	  {
7035	    if (!DECL_RTL_SET_P (vars))
7036	      {
7037		vars_need_expansion = 1;
7038		expand_decl (vars);
7039	      }
7040	    expand_decl_init (vars);
7041	    vars = TREE_CHAIN (vars);
7042	  }
7043
7044	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7045
7046	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7047
7048	return temp;
7049      }
7050
7051    case RTL_EXPR:
7052      if (RTL_EXPR_SEQUENCE (exp))
7053	{
7054	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7055	    abort ();
7056	  emit_insn (RTL_EXPR_SEQUENCE (exp));
7057	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7058	}
7059      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7060      free_temps_for_rtl_expr (exp);
7061      return RTL_EXPR_RTL (exp);
7062
7063    case CONSTRUCTOR:
7064      /* If we don't need the result, just ensure we evaluate any
7065	 subexpressions.  */
7066      if (ignore)
7067	{
7068	  tree elt;
7069
7070	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7071	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7072
7073	  return const0_rtx;
7074	}
7075
7076      /* All elts simple constants => refer to a constant in memory.  But
7077	 if this is a non-BLKmode mode, let it store a field at a time
7078	 since that should make a CONST_INT or CONST_DOUBLE when we
7079	 fold.  Likewise, if we have a target we can use, it is best to
7080	 store directly into the target unless the type is large enough
7081	 that memcpy will be used.  If we are making an initializer and
7082	 all operands are constant, put it in memory as well.
7083
7084	FIXME: Avoid trying to fill vector constructors piecemeal.
7085	Output them with output_constant_def below unless we're sure
7086	they're zeros.  This should go away when vector initializers
7087	are treated like VECTOR_CST instead of arrays.
7088      */
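      /* Illustrative example (added commentary): a TREE_STATIC aggregate
	 initializer such as
	     static struct S s = { 1, 2, 3 };
	 is typically BLKmode with no reusable target, so it is emitted once
	 through output_constant_def below, whereas a small constructor
	 whose type fits in a register mode falls through to the else branch
	 and is built field by field via store_constructor.  */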
7089      else if ((TREE_STATIC (exp)
7090		&& ((mode == BLKmode
7091		     && ! (target != 0 && safe_from_p (target, exp, 1)))
7092		    || TREE_ADDRESSABLE (exp)
7093		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7094			&& (! MOVE_BY_PIECES_P
7095			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7096			     TYPE_ALIGN (type)))
7097			&& ((TREE_CODE (type) == VECTOR_TYPE
7098			     && !is_zeros_p (exp))
7099			    || ! mostly_zeros_p (exp)))))
7100	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7101	{
7102	  rtx constructor = output_constant_def (exp, 1);
7103
7104	  if (modifier != EXPAND_CONST_ADDRESS
7105	      && modifier != EXPAND_INITIALIZER
7106	      && modifier != EXPAND_SUM)
7107	    constructor = validize_mem (constructor);
7108
7109	  return constructor;
7110	}
7111      else
7112	{
7113	  /* Handle calls that pass values in multiple non-contiguous
7114	     locations.  The Irix 6 ABI has examples of this.  */
7115	  if (target == 0 || ! safe_from_p (target, exp, 1)
7116	      || GET_CODE (target) == PARALLEL
7117	      || modifier == EXPAND_STACK_PARM)
7118	    target
7119	      = assign_temp (build_qualified_type (type,
7120						   (TYPE_QUALS (type)
7121						    | (TREE_READONLY (exp)
7122						       * TYPE_QUAL_CONST))),
7123			     0, TREE_ADDRESSABLE (exp), 1);
7124
7125	  store_constructor (exp, target, 0, int_expr_size (exp));
7126	  return target;
7127	}
7128
7129    case INDIRECT_REF:
7130      {
7131	tree exp1 = TREE_OPERAND (exp, 0);
7132	tree index;
7133	tree string = string_constant (exp1, &index);
7134
7135	/* Try to optimize reads from const strings.  */
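	/* For instance (added commentary): when EXP1 is the address of the
	   string constant "abc" plus 1, STRING is that STRING_CST and INDEX
	   is 1, so the read folds to the character constant 'b' rather than
	   an actual memory reference.  */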
7136	if (string
7137	    && TREE_CODE (string) == STRING_CST
7138	    && TREE_CODE (index) == INTEGER_CST
7139	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7140	    && GET_MODE_CLASS (mode) == MODE_INT
7141	    && GET_MODE_SIZE (mode) == 1
7142	    && modifier != EXPAND_WRITE)
7143	  return gen_int_mode (TREE_STRING_POINTER (string)
7144			       [TREE_INT_CST_LOW (index)], mode);
7145
7146	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7147	op0 = memory_address (mode, op0);
7148	temp = gen_rtx_MEM (mode, op0);
7149	set_mem_attributes (temp, exp, 0);
7150
7151	/* If we are writing to this object and its type is a record with
7152	   readonly fields, we must mark it as readonly so it will
7153	   conflict with readonly references to those fields.  */
7154	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7155	  RTX_UNCHANGING_P (temp) = 1;
7156
7157	return temp;
7158      }
7159
7160    case ARRAY_REF:
7161      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7162	abort ();
7163
7164      {
7165	tree array = TREE_OPERAND (exp, 0);
7166	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7167	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7168	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7169	HOST_WIDE_INT i;
7170
7171	/* Optimize the special-case of a zero lower bound.
7172
7173	   We convert the low_bound to sizetype to avoid some problems
7174	   with constant folding.  (E.g. suppose the lower bound is 1,
7175	   and its mode is QI.  Without the conversion,  (ARRAY
7176	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7177	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7178
7179	if (! integer_zerop (low_bound))
7180	  index = size_diffop (index, convert (sizetype, low_bound));
7181
7182	/* Fold an expression like: "foo"[2].
7183	   This is not done in fold so it won't happen inside &.
7184	   Don't fold if this is for wide characters since it's too
7185	   difficult to do correctly and this is a very rare case.  */
7186
7187	if (modifier != EXPAND_CONST_ADDRESS
7188	    && modifier != EXPAND_INITIALIZER
7189	    && modifier != EXPAND_MEMORY
7190	    && TREE_CODE (array) == STRING_CST
7191	    && TREE_CODE (index) == INTEGER_CST
7192	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7193	    && GET_MODE_CLASS (mode) == MODE_INT
7194	    && GET_MODE_SIZE (mode) == 1)
7195	  return gen_int_mode (TREE_STRING_POINTER (array)
7196			       [TREE_INT_CST_LOW (index)], mode);
7197
7198	/* If this is a constant index into a constant array,
7199	   just get the value from the array.  Handle both the cases when
7200	   we have an explicit constructor and when our operand is a variable
7201	   that was declared const.  */
7202
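	/* Illustrative example (added commentary): given
	       static const int tbl[3] = { 10, 20, 30 };
	   a use of tbl[1] is folded to the constant 20, either from an
	   explicit CONSTRUCTOR operand (first branch below) or, when
	   optimizing, from DECL_INITIAL of the readonly VAR_DECL (second
	   branch).  */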
7203	if (modifier != EXPAND_CONST_ADDRESS
7204	    && modifier != EXPAND_INITIALIZER
7205	    && modifier != EXPAND_MEMORY
7206	    && TREE_CODE (array) == CONSTRUCTOR
7207	    && ! TREE_SIDE_EFFECTS (array)
7208	    && TREE_CODE (index) == INTEGER_CST
7209	    && 0 > compare_tree_int (index,
7210				     list_length (CONSTRUCTOR_ELTS
7211						  (TREE_OPERAND (exp, 0)))))
7212	  {
7213	    tree elem;
7214
7215	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7216		 i = TREE_INT_CST_LOW (index);
7217		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7218	      ;
7219
7220	    if (elem)
7221	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7222				  modifier);
7223	  }
7224
7225	else if (optimize >= 1
7226		 && modifier != EXPAND_CONST_ADDRESS
7227		 && modifier != EXPAND_INITIALIZER
7228		 && modifier != EXPAND_MEMORY
7229		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7230		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7231		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7232	  {
7233	    if (TREE_CODE (index) == INTEGER_CST)
7234	      {
7235		tree init = DECL_INITIAL (array);
7236
7237		if (TREE_CODE (init) == CONSTRUCTOR)
7238		  {
7239		    tree elem;
7240
7241		    for (elem = CONSTRUCTOR_ELTS (init);
7242			 (elem
7243			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7244			 elem = TREE_CHAIN (elem))
7245		      ;
7246
7247		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7248		      return expand_expr (fold (TREE_VALUE (elem)), target,
7249					  tmode, modifier);
7250		  }
7251		else if (TREE_CODE (init) == STRING_CST
7252			 && 0 > compare_tree_int (index,
7253						  TREE_STRING_LENGTH (init)))
7254		  {
7255		    tree type = TREE_TYPE (TREE_TYPE (init));
7256		    enum machine_mode mode = TYPE_MODE (type);
7257
7258		    if (GET_MODE_CLASS (mode) == MODE_INT
7259			&& GET_MODE_SIZE (mode) == 1)
7260		      return gen_int_mode (TREE_STRING_POINTER (init)
7261					   [TREE_INT_CST_LOW (index)], mode);
7262		  }
7263	      }
7264	  }
7265      }
7266      /* Fall through.  */
7267
7268    case COMPONENT_REF:
7269    case BIT_FIELD_REF:
7270    case ARRAY_RANGE_REF:
7271      /* If the operand is a CONSTRUCTOR, we can just extract the
7272	 appropriate field if it is present.  Don't do this if we have
7273	 already written the data since we want to refer to that copy
7274	 and varasm.c assumes that's what we'll do.  */
7275      if (code == COMPONENT_REF
7276	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7277	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7278	{
7279	  tree elt;
7280
7281	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7282	       elt = TREE_CHAIN (elt))
7283	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7284		/* We can normally use the value of the field in the
7285		   CONSTRUCTOR.  However, if this is a bitfield in
7286		   an integral mode that we can fit in a HOST_WIDE_INT,
7287		   we must mask only the number of bits in the bitfield,
7288		   since this is done implicitly by the constructor.  If
7289		   the bitfield does not meet either of those conditions,
7290		   we can't do this optimization.  */
7291		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7292		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7293			 == MODE_INT)
7294			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7295			    <= HOST_BITS_PER_WIDE_INT))))
7296	      {
7297		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7298		    && modifier == EXPAND_STACK_PARM)
7299		  target = 0;
7300		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7301		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7302		  {
7303		    HOST_WIDE_INT bitsize
7304		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7305		    enum machine_mode imode
7306		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7307
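		    /* Worked example (added commentary): for an unsigned
		       3-bit field, OP1 below is (1 << 3) - 1 == 7 and the
		       value is simply masked; for a signed 3-bit field in
		       SImode, the value is shifted left by 32 - 3 == 29
		       bits and arithmetically shifted back, which
		       sign-extends the field.  */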
7308		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7309		      {
7310			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7311			op0 = expand_and (imode, op0, op1, target);
7312		      }
7313		    else
7314		      {
7315			tree count
7316			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7317					 0);
7318
7319			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7320					    target, 0);
7321			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7322					    target, 0);
7323		      }
7324		  }
7325
7326		return op0;
7327	      }
7328	}
7329
7330      {
7331	enum machine_mode mode1;
7332	HOST_WIDE_INT bitsize, bitpos;
7333	tree offset;
7334	int volatilep = 0;
7335	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7336					&mode1, &unsignedp, &volatilep);
7337	rtx orig_op0;
7338
7339	/* If we got back the original object, something is wrong.  Perhaps
7340	   we are evaluating an expression too early.  In any event, don't
7341	   infinitely recurse.  */
7342	if (tem == exp)
7343	  abort ();
7344
7345	/* If TEM's type is a union of variable size, pass TARGET to the inner
7346	   computation, since it will need a temporary and TARGET is known
7347	   to suffice.  This occurs in unchecked conversion in Ada.  */
7348
7349	orig_op0 = op0
7350	  = expand_expr (tem,
7351			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7352			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7353			      != INTEGER_CST)
7354			  && modifier != EXPAND_STACK_PARM
7355			  ? target : NULL_RTX),
7356			 VOIDmode,
7357			 (modifier == EXPAND_INITIALIZER
7358			  || modifier == EXPAND_CONST_ADDRESS
7359			  || modifier == EXPAND_STACK_PARM)
7360			 ? modifier : EXPAND_NORMAL);
7361
7362	/* If this is a constant, put it into a register if it is a
7363	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
7364	if (CONSTANT_P (op0))
7365	  {
7366	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7367	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7368		&& offset == 0)
7369	      op0 = force_reg (mode, op0);
7370	    else
7371	      op0 = validize_mem (force_const_mem (mode, op0));
7372	  }
7373
7374	if (offset != 0)
7375	  {
7376	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7377					  EXPAND_SUM);
7378
7379	    /* If this object is in a register, put it into memory.
7380	       This case can't occur in C, but can in Ada if we have
7381	       unchecked conversion of an expression from a scalar type to
7382	       an array or record type.  */
7383	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7384		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7385	      {
7386		/* If the operand is a SAVE_EXPR, we can deal with this by
7387		   forcing the SAVE_EXPR into memory.  */
7388		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7389		  {
7390		    put_var_into_stack (TREE_OPERAND (exp, 0),
7391					/*rescan=*/true);
7392		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7393		  }
7394		else
7395		  {
7396		    tree nt
7397		      = build_qualified_type (TREE_TYPE (tem),
7398					      (TYPE_QUALS (TREE_TYPE (tem))
7399					       | TYPE_QUAL_CONST));
7400		    rtx memloc = assign_temp (nt, 1, 1, 1);
7401
7402		    emit_move_insn (memloc, op0);
7403		    op0 = memloc;
7404		  }
7405	      }
7406
7407	    if (GET_CODE (op0) != MEM)
7408	      abort ();
7409
7410#ifdef POINTERS_EXTEND_UNSIGNED
7411	    if (GET_MODE (offset_rtx) != Pmode)
7412	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7413#else
7414	    if (GET_MODE (offset_rtx) != ptr_mode)
7415	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7416#endif
7417
7418	    /* A constant address in OP0 can have VOIDmode; we must not try
7419	       to call force_reg in that case, so avoid it.  */
7420	    if (GET_CODE (op0) == MEM
7421		&& GET_MODE (op0) == BLKmode
7422		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
7423		&& bitsize != 0
7424		&& (bitpos % bitsize) == 0
7425		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7426		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7427	      {
7428		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7429		bitpos = 0;
7430	      }
7431
7432	    op0 = offset_address (op0, offset_rtx,
7433				  highest_pow2_factor (offset));
7434	  }
7435
7436	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7437	   record its alignment as BIGGEST_ALIGNMENT.  */
7438	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7439	    && is_aligning_offset (offset, tem))
7440	  set_mem_align (op0, BIGGEST_ALIGNMENT);
7441
7442	/* Don't forget about volatility even if this is a bitfield.  */
7443	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7444	  {
7445	    if (op0 == orig_op0)
7446	      op0 = copy_rtx (op0);
7447
7448	    MEM_VOLATILE_P (op0) = 1;
7449	  }
7450
7451	/* The following code doesn't handle CONCAT.
7452	   Assume only bitpos == 0 can be used for CONCAT, due to
7453	   one-element arrays having the same mode as their element.  */
7454	if (GET_CODE (op0) == CONCAT)
7455	  {
7456	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7457	      abort ();
7458	    return op0;
7459	  }
7460
7461	/* In cases where an aligned union has an unaligned object
7462	   as a field, we might be extracting a BLKmode value from
7463	   an integer-mode (e.g., SImode) object.  Handle this case
7464	   by doing the extract into an object as wide as the field
7465	   (which we know to be the width of a basic mode), then
7466	   storing into memory, and changing the mode to BLKmode.  */
7467	if (mode1 == VOIDmode
7468	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7469	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7470		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7471		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7472		&& modifier != EXPAND_CONST_ADDRESS
7473		&& modifier != EXPAND_INITIALIZER)
7474	    /* If the field isn't aligned enough to fetch as a memref,
7475	       fetch it as a bit field.  */
7476	    || (mode1 != BLKmode
7477		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7478		&& ((TYPE_ALIGN (TREE_TYPE (tem))
7479		     < GET_MODE_ALIGNMENT (mode))
7480		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7481	    /* If the type and the field are a constant size and the
7482	       size of the type isn't the same size as the bitfield,
7483	       we must use bitfield operations.  */
7484	    || (bitsize >= 0
7485		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7486		    == INTEGER_CST)
7487		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7488					  bitsize)))
7489	  {
7490	    enum machine_mode ext_mode = mode;
7491
7492	    if (ext_mode == BLKmode
7493		&& ! (target != 0 && GET_CODE (op0) == MEM
7494		      && GET_CODE (target) == MEM
7495		      && bitpos % BITS_PER_UNIT == 0))
7496	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7497
7498	    if (ext_mode == BLKmode)
7499	      {
7500		/* In this case, BITPOS must start at a byte boundary and
7501		   TARGET, if specified, must be a MEM.  */
7502		if (GET_CODE (op0) != MEM
7503		    || (target != 0 && GET_CODE (target) != MEM)
7504		    || bitpos % BITS_PER_UNIT != 0)
7505		  abort ();
7506
7507		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7508		if (target == 0)
7509		  target = assign_temp (type, 0, 1, 1);
7510
7511		emit_block_move (target, op0,
7512				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7513					  / BITS_PER_UNIT),
7514				 (modifier == EXPAND_STACK_PARM
7515				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7516
7517		return target;
7518	      }
7519
7520	    op0 = validize_mem (op0);
7521
7522	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7523	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7524
7525	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7526				     (modifier == EXPAND_STACK_PARM
7527				      ? NULL_RTX : target),
7528				     ext_mode, ext_mode,
7529				     int_size_in_bytes (TREE_TYPE (tem)));
7530
7531	    /* If the result is a record type and BITSIZE is narrower than
7532	       the mode of OP0, an integral mode, and this is a big endian
7533	       machine, we must put the field into the high-order bits.  */
7534	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7535		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7536		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7537	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7538				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7539					    - bitsize),
7540				  op0, 1);
7541
7542	    if (mode == BLKmode)
7543	      {
7544		rtx new = assign_temp (build_qualified_type
7545				       ((*lang_hooks.types.type_for_mode)
7546					(ext_mode, 0),
7547					TYPE_QUAL_CONST), 0, 1, 1);
7548
7549		emit_move_insn (new, op0);
7550		op0 = copy_rtx (new);
7551		PUT_MODE (op0, BLKmode);
7552		set_mem_attributes (op0, exp, 1);
7553	      }
7554
7555	    return op0;
7556	  }
7557
7558	/* If the result is BLKmode, use that to access the object
7559	   now as well.  */
7560	if (mode == BLKmode)
7561	  mode1 = BLKmode;
7562
7563	/* Get a reference to just this component.  */
7564	if (modifier == EXPAND_CONST_ADDRESS
7565	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7566	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7567	else
7568	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7569
7570	if (op0 == orig_op0)
7571	  op0 = copy_rtx (op0);
7572
7573	set_mem_attributes (op0, exp, 0);
7574	if (GET_CODE (XEXP (op0, 0)) == REG)
7575	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7576
7577	MEM_VOLATILE_P (op0) |= volatilep;
7578	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7579	    || modifier == EXPAND_CONST_ADDRESS
7580	    || modifier == EXPAND_INITIALIZER)
7581	  return op0;
7582	else if (target == 0)
7583	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7584
7585	convert_move (target, op0, unsignedp);
7586	return target;
7587      }
7588
7589    case VTABLE_REF:
7590      {
7591	rtx insn, before = get_last_insn (), vtbl_ref;
7592
7593	/* Evaluate the interior expression.  */
7594	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7595				 tmode, modifier);
7596
7597	/* Get or create an instruction off which to hang a note.  */
7598	if (REG_P (subtarget))
7599	  {
7600	    target = subtarget;
7601	    insn = get_last_insn ();
7602	    if (insn == before)
7603	      abort ();
7604	    if (! INSN_P (insn))
7605	      insn = prev_nonnote_insn (insn);
7606	  }
7607	else
7608	  {
7609	    target = gen_reg_rtx (GET_MODE (subtarget));
7610	    insn = emit_move_insn (target, subtarget);
7611	  }
7612
7613	/* Collect the data for the note.  */
7614	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7615	vtbl_ref = plus_constant (vtbl_ref,
7616				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7617	/* Discard the initial CONST that was added.  */
7618	vtbl_ref = XEXP (vtbl_ref, 0);
7619
7620	REG_NOTES (insn)
7621	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7622
7623	return target;
7624      }
7625
7626      /* Intended for a reference to a buffer of a file-object in Pascal.
7627	 But it's not certain that a special tree code will really be
7628	 necessary for these.  INDIRECT_REF might work for them.  */
7629    case BUFFER_REF:
7630      abort ();
7631
7632    case IN_EXPR:
7633      {
7634	/* Pascal set IN expression.
7635
7636	   Algorithm:
7637	       rlo       = set_low - (set_low%bits_per_word);
7638	       the_word  = set [ (index - rlo)/bits_per_word ];
7639	       bit_index = index % bits_per_word;
7640	       bitmask   = 1 << bit_index;
7641	       return !!(the_word & bitmask);  */
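	/* For illustration (added commentary), taking bits_per_word == 8
	   for brevity: with set_low == 3 and index == 11, rlo == 0,
	   the_word == set[(11 - 0) / 8] == set[1], bit_index == 11 % 8 == 3
	   and bitmask == 1 << 3 == 8, i.e. we test bit 3 of the second
	   word of the set.  */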
7642
7643	tree set = TREE_OPERAND (exp, 0);
7644	tree index = TREE_OPERAND (exp, 1);
7645	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7646	tree set_type = TREE_TYPE (set);
7647	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7648	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7649	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7650	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7651	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7652	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7653	rtx setaddr = XEXP (setval, 0);
7654	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7655	rtx rlow;
7656	rtx diff, quo, rem, addr, bit, result;
7657
7658	/* If domain is empty, answer is no.  Likewise if index is constant
7659	   and out of bounds.  */
7660	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7661	     && TREE_CODE (set_low_bound) == INTEGER_CST
7662	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7663	     || (TREE_CODE (index) == INTEGER_CST
7664		 && TREE_CODE (set_low_bound) == INTEGER_CST
7665		 && tree_int_cst_lt (index, set_low_bound))
7666	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7667		 && TREE_CODE (index) == INTEGER_CST
7668		 && tree_int_cst_lt (set_high_bound, index))))
7669	  return const0_rtx;
7670
7671	if (target == 0)
7672	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7673
7674	/* If we get here, we have to generate the code for both cases
7675	   (in range and out of range).  */
7676
7677	op0 = gen_label_rtx ();
7678	op1 = gen_label_rtx ();
7679
7680	if (! (GET_CODE (index_val) == CONST_INT
7681	       && GET_CODE (lo_r) == CONST_INT))
7682	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7683				   GET_MODE (index_val), iunsignedp, op1);
7684
7685	if (! (GET_CODE (index_val) == CONST_INT
7686	       && GET_CODE (hi_r) == CONST_INT))
7687	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7688				   GET_MODE (index_val), iunsignedp, op1);
7689
7690	/* Calculate the element number of bit zero in the first word
7691	   of the set.  */
7692	if (GET_CODE (lo_r) == CONST_INT)
7693	  rlow = GEN_INT (INTVAL (lo_r)
7694			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7695	else
7696	  rlow = expand_binop (index_mode, and_optab, lo_r,
7697			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7698			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7699
7700	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7701			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7702
7703	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7704			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7705	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7706			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7707
7708	addr = memory_address (byte_mode,
7709			       expand_binop (index_mode, add_optab, diff,
7710					     setaddr, NULL_RTX, iunsignedp,
7711					     OPTAB_LIB_WIDEN));
7712
7713	/* Extract the bit we want to examine.  */
7714	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7715			    gen_rtx_MEM (byte_mode, addr),
7716			    make_tree (TREE_TYPE (index), rem),
7717			    NULL_RTX, 1);
7718	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7719			       GET_MODE (target) == byte_mode ? target : 0,
7720			       1, OPTAB_LIB_WIDEN);
7721
7722	if (result != target)
7723	  convert_move (target, result, 1);
7724
7725	/* Output the code to handle the out-of-range case.  */
7726	emit_jump (op0);
7727	emit_label (op1);
7728	emit_move_insn (target, const0_rtx);
7729	emit_label (op0);
7730	return target;
7731      }
7732
7733    case WITH_CLEANUP_EXPR:
7734      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7735	{
7736	  WITH_CLEANUP_EXPR_RTL (exp)
7737	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7738	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7739				  CLEANUP_EH_ONLY (exp));
7740
7741	  /* That's it for this cleanup.  */
7742	  TREE_OPERAND (exp, 1) = 0;
7743	}
7744      return WITH_CLEANUP_EXPR_RTL (exp);
7745
7746    case CLEANUP_POINT_EXPR:
7747      {
7748	/* Start a new binding layer that will keep track of all cleanup
7749	   actions to be performed.  */
7750	expand_start_bindings (2);
7751
7752	target_temp_slot_level = temp_slot_level;
7753
7754	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7755	/* If we're going to use this value, load it up now.  */
7756	if (! ignore)
7757	  op0 = force_not_mem (op0);
7758	preserve_temp_slots (op0);
7759	expand_end_bindings (NULL_TREE, 0, 0);
7760      }
7761      return op0;
7762
7763    case CALL_EXPR:
7764      /* Check for a built-in function.  */
7765      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7766	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7767	      == FUNCTION_DECL)
7768	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7769	{
7770	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7771	      == BUILT_IN_FRONTEND)
7772	    return (*lang_hooks.expand_expr) (exp, original_target,
7773					      tmode, modifier);
7774	  else
7775	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7776	}
7777
7778      return expand_call (exp, target, ignore);
7779
7780    case NON_LVALUE_EXPR:
7781    case NOP_EXPR:
7782    case CONVERT_EXPR:
7783    case REFERENCE_EXPR:
7784      if (TREE_OPERAND (exp, 0) == error_mark_node)
7785	return const0_rtx;
7786
7787      if (TREE_CODE (type) == UNION_TYPE)
7788	{
7789	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7790
7791	  /* If both input and output are BLKmode, this conversion isn't doing
7792	     anything except possibly changing memory attribute.  */
7793	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7794	    {
7795	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7796					modifier);
7797
7798	      result = copy_rtx (result);
7799	      set_mem_attributes (result, exp, 0);
7800	      return result;
7801	    }
7802
7803	  if (target == 0)
7804	    target = assign_temp (type, 0, 1, 1);
7805
7806	  if (GET_CODE (target) == MEM)
7807	    /* Store data into beginning of memory target.  */
7808	    store_expr (TREE_OPERAND (exp, 0),
7809			adjust_address (target, TYPE_MODE (valtype), 0),
7810			modifier == EXPAND_STACK_PARM ? 2 : 0);
7811
7812	  else if (GET_CODE (target) == REG)
7813	    /* Store this field into a union of the proper type.  */
7814	    store_field (target,
7815			 MIN ((int_size_in_bytes (TREE_TYPE
7816						  (TREE_OPERAND (exp, 0)))
7817			       * BITS_PER_UNIT),
7818			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7819			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7820			 VOIDmode, 0, type, 0);
7821	  else
7822	    abort ();
7823
7824	  /* Return the entire union.  */
7825	  return target;
7826	}
7827
7828      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7829	{
7830	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7831			     modifier);
7832
7833	  /* If the signedness of the conversion differs and OP0 is
7834	     a promoted SUBREG, clear that indication since we now
7835	     have to do the proper extension.  */
7836	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7837	      && GET_CODE (op0) == SUBREG)
7838	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7839
7840	  return op0;
7841	}
7842
7843      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7844      if (GET_MODE (op0) == mode)
7845	return op0;
7846
7847      /* If OP0 is a constant, just convert it into the proper mode.  */
7848      if (CONSTANT_P (op0))
7849	{
7850	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7851	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7852
7853	  if (modifier == EXPAND_INITIALIZER)
7854	    return simplify_gen_subreg (mode, op0, inner_mode,
7855					subreg_lowpart_offset (mode,
7856							       inner_mode));
7857	  else
7858	    return convert_modes (mode, inner_mode, op0,
7859				  TREE_UNSIGNED (inner_type));
7860	}
7861
7862      if (modifier == EXPAND_INITIALIZER)
7863	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7864
7865      if (target == 0)
7866	return
7867	  convert_to_mode (mode, op0,
7868			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7869      else
7870	convert_move (target, op0,
7871		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7872      return target;
7873
7874    case VIEW_CONVERT_EXPR:
7875      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7876
7877      /* If the input and output modes are both the same, we are done.
7878	 Otherwise, if neither mode is BLKmode and both are within a word, we
7879	 can use gen_lowpart.  If neither is true, make sure the operand is
7880	 in memory and convert the MEM to the new mode.  */
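      /* Illustrative examples (added commentary): viewing a 32-bit float
	 as a 32-bit integer converts between SFmode and SImode, and on a
	 target whose word is at least 32 bits gen_lowpart suffices;
	 viewing a DFmode double as a two-int structure (BLKmode) instead
	 forces the operand into memory and rereads the MEM in the new
	 mode, as done below.  */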
7881      if (TYPE_MODE (type) == GET_MODE (op0))
7882	;
7883      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7884	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7885	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7886	op0 = gen_lowpart (TYPE_MODE (type), op0);
7887      else if (GET_CODE (op0) != MEM)
7888	{
7889	  /* If the operand is not a MEM, force it into memory.  Since we
7890	     are going to be changing the mode of the MEM, don't call
7891	     force_const_mem for constants because we don't allow pool
7892	     constants to change mode.  */
7893	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7894
7895	  if (TREE_ADDRESSABLE (exp))
7896	    abort ();
7897
7898	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7899	    target
7900	      = assign_stack_temp_for_type
7901		(TYPE_MODE (inner_type),
7902		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7903
7904	  emit_move_insn (target, op0);
7905	  op0 = target;
7906	}
7907
7908      /* At this point, OP0 is in the correct mode.  If the output type is such
7909	 that the operand is known to be aligned, indicate that it is.
7910	 Otherwise, we need only be concerned about alignment for non-BLKmode
7911	 results.  */
7912      if (GET_CODE (op0) == MEM)
7913	{
7914	  op0 = copy_rtx (op0);
7915
7916	  if (TYPE_ALIGN_OK (type))
7917	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7918	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7919		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7920	    {
7921	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7922	      HOST_WIDE_INT temp_size
7923		= MAX (int_size_in_bytes (inner_type),
7924		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7925	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7926						    temp_size, 0, type);
7927	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7928
7929	      if (TREE_ADDRESSABLE (exp))
7930		abort ();
7931
7932	      if (GET_MODE (op0) == BLKmode)
7933		emit_block_move (new_with_op0_mode, op0,
7934				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7935				 (modifier == EXPAND_STACK_PARM
7936				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7937	      else
7938		emit_move_insn (new_with_op0_mode, op0);
7939
7940	      op0 = new;
7941	    }
7942
7943	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7944	}
7945
7946      return op0;
7947
7948    case PLUS_EXPR:
7949      this_optab = ! unsignedp && flag_trapv
7950                   && (GET_MODE_CLASS (mode) == MODE_INT)
7951                   ? addv_optab : add_optab;
7952
7953      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7954	 something else, make sure we add the register to the constant and
7955	 then to the other thing.  This case can occur during strength
7956	 reduction and doing it this way will produce better code if the
7957	 frame pointer or argument pointer is eliminated.
7958
7959	 fold-const.c will ensure that the constant is always in the inner
7960	 PLUS_EXPR, so the only case we need to do anything about is if
7961	 sp, ap, or fp is our second argument, in which case we must swap
7962	 the innermost first argument and our second argument.  */
7963
7964      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7965	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7966	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7967	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7968	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7969	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7970	{
7971	  tree t = TREE_OPERAND (exp, 1);
7972
7973	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7974	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7975	}
7976
7977      /* If the result is to be ptr_mode and we are adding an integer to
7978	 something, we might be forming a constant.  So try to use
7979	 plus_constant.  If it produces a sum and we can't accept it,
7980	 use force_operand.  This allows P = &ARR[const] to generate
7981	 efficient code on machines where a SYMBOL_REF is not a valid
7982	 address.
7983
7984	 If this is an EXPAND_SUM call, always return the sum.  */
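      /* Illustrative example (added commentary): for P = &ARR[3] with
	 4-byte elements, one operand expands to a SYMBOL_REF for ARR and
	 the other contributes the constant 12, so plus_constant below can
	 fold them into something like
	 (const (plus (symbol_ref "ARR") (const_int 12))); force_operand
	 is only called afterwards if that form is not acceptable to the
	 caller as is.  */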
7985      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7986	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7987	{
7988	  if (modifier == EXPAND_STACK_PARM)
7989	    target = 0;
7990	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7991	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7992	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7993	    {
7994	      rtx constant_part;
7995
7996	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7997				 EXPAND_SUM);
7998	      /* Use immed_double_const to ensure that the constant is
7999		 truncated according to the mode of OP1, then sign extended
8000		 to a HOST_WIDE_INT.  Using the constant directly can result
8001		 in non-canonical RTL in a 64x32 cross compile.  */
8002	      constant_part
8003		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8004				      (HOST_WIDE_INT) 0,
8005				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8006	      op1 = plus_constant (op1, INTVAL (constant_part));
8007	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8008		op1 = force_operand (op1, target);
8009	      return op1;
8010	    }
8011
8012	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8013		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8014		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8015	    {
8016	      rtx constant_part;
8017
8018	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8019				 (modifier == EXPAND_INITIALIZER
8020				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8021	      if (! CONSTANT_P (op0))
8022		{
8023		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8024				     VOIDmode, modifier);
8025		  /* Don't go to both_summands if modifier
8026		     says it's not right to return a PLUS.  */
8027		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8028		    goto binop2;
8029		  goto both_summands;
8030		}
8031	      /* Use immed_double_const to ensure that the constant is
8032		 truncated according to the mode of OP1, then sign extended
8033		 to a HOST_WIDE_INT.  Using the constant directly can result
8034		 in non-canonical RTL in a 64x32 cross compile.  */
8035	      constant_part
8036		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8037				      (HOST_WIDE_INT) 0,
8038				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8039	      op0 = plus_constant (op0, INTVAL (constant_part));
8040	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8041		op0 = force_operand (op0, target);
8042	      return op0;
8043	    }
8044	}
8045
8046      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8047	subtarget = 0;
8048
8049      /* No sense saving up arithmetic to be done
8050	 if it's all in the wrong mode to form part of an address.
8051	 And force_operand won't know whether to sign-extend or
8052	 zero-extend.  */
8053      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8054	  || mode != ptr_mode)
8055	{
8056	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8057	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8058	  if (op0 == const0_rtx)
8059	    return op1;
8060	  if (op1 == const0_rtx)
8061	    return op0;
8062	  goto binop2;
8063	}
8064
8065      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8066      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8067
8068      /* We come here from MINUS_EXPR when the second operand is a
8069         constant.  */
8070    both_summands:
8071      /* Make sure any term that's a sum with a constant comes last.  */
8072      if (GET_CODE (op0) == PLUS
8073	  && CONSTANT_P (XEXP (op0, 1)))
8074	{
8075	  temp = op0;
8076	  op0 = op1;
8077	  op1 = temp;
8078	}
8079      /* If adding to a sum including a constant,
8080	 associate it to put the constant outside.  */
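      /* Worked example (added commentary): adding (reg R) to
	 (plus (mult X Y) (const_int 8)) is reassociated below into
	 (plus (plus (mult X Y) (reg R)) (const_int 8)), keeping any MULT
	 first and the constant outermost so the sum remains in canonical
	 address form.  */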
8081      if (GET_CODE (op1) == PLUS
8082	  && CONSTANT_P (XEXP (op1, 1)))
8083	{
8084	  rtx constant_term = const0_rtx;
8085
8086	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8087	  if (temp != 0)
8088	    op0 = temp;
8089	  /* Ensure that MULT comes first if there is one.  */
8090	  else if (GET_CODE (op0) == MULT)
8091	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8092	  else
8093	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8094
8095	  /* Let's also eliminate constants from op0 if possible.  */
8096	  op0 = eliminate_constant_term (op0, &constant_term);
8097
8098	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8099	     their sum should be a constant.  Form it into OP1, since the
8100	     result we want will then be OP0 + OP1.  */
8101
8102	  temp = simplify_binary_operation (PLUS, mode, constant_term,
8103					    XEXP (op1, 1));
8104	  if (temp != 0)
8105	    op1 = temp;
8106	  else
8107	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8108	}
8109
8110      /* Put a constant term last and put a multiplication first.  */
8111      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8112	temp = op1, op1 = op0, op0 = temp;
8113
8114      temp = simplify_binary_operation (PLUS, mode, op0, op1);
8115      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8116
8117    case MINUS_EXPR:
8118      /* For initializers, we are allowed to return a MINUS of two
8119	 symbolic constants.  Here we handle all cases when both operands
8120	 are constant.  */
8121      /* Handle difference of two symbolic constants,
8122	 for the sake of an initializer.  */
8123      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8124	  && really_constant_p (TREE_OPERAND (exp, 0))
8125	  && really_constant_p (TREE_OPERAND (exp, 1)))
8126	{
8127	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8128				 modifier);
8129	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8130				 modifier);
8131
8132	  /* If the last operand is a CONST_INT, use plus_constant of
8133	     the negated constant.  Else make the MINUS.  */
8134	  if (GET_CODE (op1) == CONST_INT)
8135	    return plus_constant (op0, - INTVAL (op1));
8136	  else
8137	    return gen_rtx_MINUS (mode, op0, op1);
8138	}
8139
8140      this_optab = ! unsignedp && flag_trapv
8141                   && (GET_MODE_CLASS(mode) == MODE_INT)
8142                   ? subv_optab : sub_optab;
8143
8144      /* No sense saving up arithmetic to be done
8145	 if it's all in the wrong mode to form part of an address.
8146	 And force_operand won't know whether to sign-extend or
8147	 zero-extend.  */
8148      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8149	  || mode != ptr_mode)
8150	goto binop;
8151
8152      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8153	subtarget = 0;
8154
8155      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8156      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8157
8158      /* Convert A - const to A + (-const).  */
8159      if (GET_CODE (op1) == CONST_INT)
8160	{
8161	  op1 = negate_rtx (mode, op1);
8162	  goto both_summands;
8163	}
8164
8165      goto binop2;
8166
8167    case MULT_EXPR:
8168      /* If first operand is constant, swap them.
8169	 Thus the following special case checks need only
8170	 check the second operand.  */
8171      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8172	{
8173	  tree t1 = TREE_OPERAND (exp, 0);
8174	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8175	  TREE_OPERAND (exp, 1) = t1;
8176	}
8177
8178      /* Attempt to return something suitable for generating an
8179	 indexed address, for machines that support that.  */
8180
8181      if (modifier == EXPAND_SUM && mode == ptr_mode
8182	  && host_integerp (TREE_OPERAND (exp, 1), 0))
8183	{
8184	  tree exp1 = TREE_OPERAND (exp, 1);
8185
8186	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8187			     EXPAND_SUM);
8188
8189	  /* If we knew for certain that this is arithmetic for an array
8190	     reference, and we knew the bounds of the array, then we could
8191	     apply the distributive law across (PLUS X C) for constant C.
8192	     Without such knowledge, we risk overflowing the computation
8193	     when both X and C are large, but X+C isn't.  */
8194	  /* ??? Could perhaps special-case EXP being unsigned and C being
8195	     positive.  In that case we are certain that X+C is no smaller
8196	     than X and so the transformed expression will overflow iff the
8197	     original would have.  */
8198
8199	  if (GET_CODE (op0) != REG)
8200	    op0 = force_operand (op0, NULL_RTX);
8201	  if (GET_CODE (op0) != REG)
8202	    op0 = copy_to_mode_reg (mode, op0);
8203
8204	  return gen_rtx_MULT (mode, op0,
8205			       gen_int_mode (tree_low_cst (exp1, 0),
8206					     TYPE_MODE (TREE_TYPE (exp1))));
8207	}
8208
8209      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8210	subtarget = 0;
8211
8212      if (modifier == EXPAND_STACK_PARM)
8213	target = 0;
8214
8215      /* Check for multiplying things that have been extended
8216	 from a narrower type.  If this machine supports multiplying
8217	 in that narrower type with a result in the desired type,
8218	 do it that way, and avoid the explicit type-conversion.  */
8219      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8220	  && TREE_CODE (type) == INTEGER_TYPE
8221	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8222	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8223	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8224	       && int_fits_type_p (TREE_OPERAND (exp, 1),
8225				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8226	       /* Don't use a widening multiply if a shift will do.  */
8227	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8228		    > HOST_BITS_PER_WIDE_INT)
8229		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8230	      ||
8231	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8232	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8233		   ==
8234		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8235	       /* If both operands are extended, they must either both
8236		  be zero-extended or both be sign-extended.  */
8237	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8238		   ==
8239		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8240	{
8241	  enum machine_mode innermode
8242	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8243	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8244			? smul_widen_optab : umul_widen_optab);
8245	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8246			? umul_widen_optab : smul_widen_optab);
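	  /* THIS_OPTAB is the widening multiply whose signedness matches the
	     extended operands; OTHER_OPTAB is the opposite one.  If only the
	     latter exists, we can still use it and then fix up the high part
	     with expand_mult_highpart_adjust below.  */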
8247	  if (mode == GET_MODE_WIDER_MODE (innermode))
8248	    {
8249	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8250		{
8251		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8252				     NULL_RTX, VOIDmode, 0);
8253		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8254		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8255				       VOIDmode, 0);
8256		  else
8257		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8258				       NULL_RTX, VOIDmode, 0);
8259		  goto binop2;
8260		}
8261	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8262		       && innermode == word_mode)
8263		{
8264		  rtx htem;
8265		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8266				     NULL_RTX, VOIDmode, 0);
8267		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8268		    op1 = convert_modes (innermode, mode,
8269					 expand_expr (TREE_OPERAND (exp, 1),
8270						      NULL_RTX, VOIDmode, 0),
8271					 unsignedp);
8272		  else
8273		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8274				       NULL_RTX, VOIDmode, 0);
8275		  temp = expand_binop (mode, other_optab, op0, op1, target,
8276				       unsignedp, OPTAB_LIB_WIDEN);
8277		  htem = expand_mult_highpart_adjust (innermode,
8278						      gen_highpart (innermode, temp),
8279						      op0, op1,
8280						      gen_highpart (innermode, temp),
8281						      unsignedp);
8282		  emit_move_insn (gen_highpart (innermode, temp), htem);
8283		  return temp;
8284		}
8285	    }
8286	}
8287      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8288      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8289      return expand_mult (mode, op0, op1, target, unsignedp);
8290
8291    case TRUNC_DIV_EXPR:
8292    case FLOOR_DIV_EXPR:
8293    case CEIL_DIV_EXPR:
8294    case ROUND_DIV_EXPR:
8295    case EXACT_DIV_EXPR:
8296      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8297	subtarget = 0;
8298      if (modifier == EXPAND_STACK_PARM)
8299	target = 0;
8300      /* Possible optimization: compute the dividend with EXPAND_SUM
8301	 then if the divisor is constant can optimize the case
8302	 where some terms of the dividend have coeffs divisible by it.  */
8303      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8304      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8305      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8306
8307    case RDIV_EXPR:
8308	      /* Emit a/b as a*(1/b).  Later passes may be able to CSE the
8309	         reciprocal, saving an expensive divide.  If not, combine will
8310	         rebuild the original computation.  */
8311      if (flag_unsafe_math_optimizations && optimize && !optimize_size
8312	  && TREE_CODE (type) == REAL_TYPE
8313	  && !real_onep (TREE_OPERAND (exp, 0)))
8314        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8315				   build (RDIV_EXPR, type,
8316					  build_real (type, dconst1),
8317					  TREE_OPERAND (exp, 1))),
8318			    target, tmode, modifier);
8319      this_optab = sdiv_optab;
8320      goto binop;
8321
8322    case TRUNC_MOD_EXPR:
8323    case FLOOR_MOD_EXPR:
8324    case CEIL_MOD_EXPR:
8325    case ROUND_MOD_EXPR:
8326      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8327	subtarget = 0;
8328      if (modifier == EXPAND_STACK_PARM)
8329	target = 0;
8330      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8331      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8332      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8333
8334    case FIX_ROUND_EXPR:
8335    case FIX_FLOOR_EXPR:
8336    case FIX_CEIL_EXPR:
8337      abort ();			/* Not used for C.  */
8338
8339    case FIX_TRUNC_EXPR:
8340      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8341      if (target == 0 || modifier == EXPAND_STACK_PARM)
8342	target = gen_reg_rtx (mode);
8343      expand_fix (target, op0, unsignedp);
8344      return target;
8345
8346    case FLOAT_EXPR:
8347      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8348      if (target == 0 || modifier == EXPAND_STACK_PARM)
8349	target = gen_reg_rtx (mode);
8350      /* expand_float can't figure out what to do if FROM has VOIDmode.
8351	 So give it the correct mode.  With -O, cse will optimize this.  */
8352      if (GET_MODE (op0) == VOIDmode)
8353	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8354				op0);
8355      expand_float (target, op0,
8356		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8357      return target;
8358
8359    case NEGATE_EXPR:
8360      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8361      if (modifier == EXPAND_STACK_PARM)
8362	target = 0;
8363      temp = expand_unop (mode,
8364			  ! unsignedp && flag_trapv
8365			  && (GET_MODE_CLASS(mode) == MODE_INT)
8366			  ? negv_optab : neg_optab, op0, target, 0);
8367      if (temp == 0)
8368	abort ();
8369      return temp;
8370
8371    case ABS_EXPR:
8372      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8373      if (modifier == EXPAND_STACK_PARM)
8374	target = 0;
8375
8376      /* Handle complex values specially.  */
8377      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8378	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8379	return expand_complex_abs (mode, op0, target, unsignedp);
8380
8381      /* Unsigned abs is simply the operand.  Testing here means we don't
8382	 risk generating incorrect code below.  */
8383      if (TREE_UNSIGNED (type))
8384	return op0;
8385
8386      return expand_abs (mode, op0, target, unsignedp,
8387			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8388
8389    case MAX_EXPR:
8390    case MIN_EXPR:
8391      target = original_target;
8392      if (target == 0
8393	  || modifier == EXPAND_STACK_PARM
8394	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8395	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8396	  || GET_MODE (target) != mode
8397	  || (GET_CODE (target) == REG
8398	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8399	target = gen_reg_rtx (mode);
8400      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8401      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8402
8403      /* First try to do it with a special MIN or MAX instruction.
8404	 If that does not win, use a conditional jump to select the proper
8405	 value.  */
8406      this_optab = (TREE_UNSIGNED (type)
8407		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
8408		    : (code == MIN_EXPR ? smin_optab : smax_optab));
8409
8410      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8411			   OPTAB_WIDEN);
8412      if (temp != 0)
8413	return temp;
8414
8415      /* At this point, a MEM target is no longer useful; we will get better
8416	 code without it.  */
8417
8418      if (GET_CODE (target) == MEM)
8419	target = gen_reg_rtx (mode);
8420
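      /* The fallback sequence is roughly: target = op0; if target is already
	 the desired extremum compared with op1, branch to OP0 (used as a
	 label here); otherwise fall through and copy op1 into target.  */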
8421      if (target != op0)
8422	emit_move_insn (target, op0);
8423
8424      op0 = gen_label_rtx ();
8425
8426      /* If this mode is an integer too wide to compare properly,
8427	 compare word by word.  Rely on cse to optimize constant cases.  */
8428      if (GET_MODE_CLASS (mode) == MODE_INT
8429	  && ! can_compare_p (GE, mode, ccp_jump))
8430	{
8431	  if (code == MAX_EXPR)
8432	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8433					  target, op1, NULL_RTX, op0);
8434	  else
8435	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8436					  op1, target, NULL_RTX, op0);
8437	}
8438      else
8439	{
8440	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8441	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8442				   unsignedp, mode, NULL_RTX, NULL_RTX,
8443				   op0);
8444	}
8445      emit_move_insn (target, op1);
8446      emit_label (op0);
8447      return target;
8448
8449    case BIT_NOT_EXPR:
8450      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8451      if (modifier == EXPAND_STACK_PARM)
8452	target = 0;
8453      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8454      if (temp == 0)
8455	abort ();
8456      return temp;
8457
8458    case FFS_EXPR:
8459      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8460      if (modifier == EXPAND_STACK_PARM)
8461	target = 0;
8462      temp = expand_unop (mode, ffs_optab, op0, target, 1);
8463      if (temp == 0)
8464	abort ();
8465      return temp;
8466
8467      /* ??? Can optimize bitwise operations with one arg constant.
8468	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8469	 and (a bitwise1 b) bitwise2 b (etc)
8470	 but that is probably not worth while.  */
8471
8472      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8473	 boolean values when we want in all cases to compute both of them.  In
8474	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8475	 as actual zero-or-1 values and then bitwise anding.  In cases where
8476	 there cannot be any side effects, better code would be made by
8477	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8478	 how to recognize those cases.  */
8479
8480    case TRUTH_AND_EXPR:
8481    case BIT_AND_EXPR:
8482      this_optab = and_optab;
8483      goto binop;
8484
8485    case TRUTH_OR_EXPR:
8486    case BIT_IOR_EXPR:
8487      this_optab = ior_optab;
8488      goto binop;
8489
8490    case TRUTH_XOR_EXPR:
8491    case BIT_XOR_EXPR:
8492      this_optab = xor_optab;
8493      goto binop;
8494
8495    case LSHIFT_EXPR:
8496    case RSHIFT_EXPR:
8497    case LROTATE_EXPR:
8498    case RROTATE_EXPR:
8499      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8500	subtarget = 0;
8501      if (modifier == EXPAND_STACK_PARM)
8502	target = 0;
8503      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8504      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8505			   unsignedp);
8506
8507      /* Could determine the answer when only additive constants differ.  Also,
8508	 the addition of one can be handled by changing the condition.  */
8509    case LT_EXPR:
8510    case LE_EXPR:
8511    case GT_EXPR:
8512    case GE_EXPR:
8513    case EQ_EXPR:
8514    case NE_EXPR:
8515    case UNORDERED_EXPR:
8516    case ORDERED_EXPR:
8517    case UNLT_EXPR:
8518    case UNLE_EXPR:
8519    case UNGT_EXPR:
8520    case UNGE_EXPR:
8521    case UNEQ_EXPR:
8522      temp = do_store_flag (exp,
8523			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8524			    tmode != VOIDmode ? tmode : mode, 0);
8525      if (temp != 0)
8526	return temp;
8527
8528      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8529      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8530	  && original_target
8531	  && GET_CODE (original_target) == REG
8532	  && (GET_MODE (original_target)
8533	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8534	{
8535	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8536			      VOIDmode, 0);
8537
8538	  /* If temp is constant, we can just compute the result.  */
8539	  if (GET_CODE (temp) == CONST_INT)
8540	    {
8541	      if (INTVAL (temp) != 0)
8542	        emit_move_insn (target, const1_rtx);
8543	      else
8544	        emit_move_insn (target, const0_rtx);
8545
8546	      return target;
8547	    }
8548
8549	  if (temp != original_target)
8550	    {
8551	      enum machine_mode mode1 = GET_MODE (temp);
8552	      if (mode1 == VOIDmode)
8553		mode1 = tmode != VOIDmode ? tmode : mode;
8554
8555	      temp = copy_to_mode_reg (mode1, temp);
8556	    }
8557
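	  /* Emit roughly: if (temp == 0) goto OP1; temp = 1; OP1:
	     i.e. normalize any nonzero value to 1.  */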
8558	  op1 = gen_label_rtx ();
8559	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8560				   GET_MODE (temp), unsignedp, op1);
8561	  emit_move_insn (temp, const1_rtx);
8562	  emit_label (op1);
8563	  return temp;
8564	}
8565
8566      /* If no set-flag instruction, must generate a conditional
8567	 store into a temporary variable.  Drop through
8568	 and handle this like && and ||.  */
8569
8570    case TRUTH_ANDIF_EXPR:
8571    case TRUTH_ORIF_EXPR:
8572      if (! ignore
8573	  && (target == 0
8574	      || modifier == EXPAND_STACK_PARM
8575	      || ! safe_from_p (target, exp, 1)
8576	      /* Make sure we don't have a hard reg (such as function's return
8577		 value) live across basic blocks, if not optimizing.  */
8578	      || (!optimize && GET_CODE (target) == REG
8579		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8580	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8581
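      /* Emit roughly: target = 0; if (!exp) goto OP1; target = 1; OP1:
	 -- materialize the truth value with a branch rather than a
	 store-flag instruction.  */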
8582      if (target)
8583	emit_clr_insn (target);
8584
8585      op1 = gen_label_rtx ();
8586      jumpifnot (exp, op1);
8587
8588      if (target)
8589	emit_0_to_1_insn (target);
8590
8591      emit_label (op1);
8592      return ignore ? const0_rtx : target;
8593
8594    case TRUTH_NOT_EXPR:
8595      if (modifier == EXPAND_STACK_PARM)
8596	target = 0;
8597      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8598      /* The parser is careful to generate TRUTH_NOT_EXPR
8599	 only with operands that are always zero or one.  */
8600      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8601			   target, 1, OPTAB_LIB_WIDEN);
8602      if (temp == 0)
8603	abort ();
8604      return temp;
8605
8606    case COMPOUND_EXPR:
8607      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8608      emit_queue ();
8609      return expand_expr (TREE_OPERAND (exp, 1),
8610			  (ignore ? const0_rtx : target),
8611			  VOIDmode, modifier);
8612
8613    case COND_EXPR:
8614      /* If we would have a "singleton" (see below) were it not for a
8615	 conversion in each arm, bring that conversion back out.  */
8616      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8617	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8618	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8619	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8620	{
8621	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8622	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8623
8624	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8625	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8626	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8627		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8628	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8629		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8630	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8631		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8632	    return expand_expr (build1 (NOP_EXPR, type,
8633					build (COND_EXPR, TREE_TYPE (iftrue),
8634					       TREE_OPERAND (exp, 0),
8635					       iftrue, iffalse)),
8636				target, tmode, modifier);
8637	}
8638
8639      {
8640	/* Note that COND_EXPRs whose type is a structure or union
8641	   are required to be constructed to contain assignments of
8642	   a temporary variable, so that we can evaluate them here
8643	   for side effect only.  If type is void, we must do likewise.  */
8644
8645	/* If an arm of the branch requires a cleanup,
8646	   only that cleanup is performed.  */
8647
8648	tree singleton = 0;
8649	tree binary_op = 0, unary_op = 0;
8650
8651	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8652	   convert it to our mode, if necessary.  */
8653	if (integer_onep (TREE_OPERAND (exp, 1))
8654	    && integer_zerop (TREE_OPERAND (exp, 2))
8655	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8656	  {
8657	    if (ignore)
8658	      {
8659		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8660			     modifier);
8661		return const0_rtx;
8662	      }
8663
8664	    if (modifier == EXPAND_STACK_PARM)
8665	      target = 0;
8666	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8667	    if (GET_MODE (op0) == mode)
8668	      return op0;
8669
8670	    if (target == 0)
8671	      target = gen_reg_rtx (mode);
8672	    convert_move (target, op0, unsignedp);
8673	    return target;
8674	  }
8675
8676	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8677	   output and conditionally add B.  Similarly for unary operations.
8678	   Don't do this if X has side-effects because those side effects
8679	   might affect A or B and the "?" operation is a sequence point in
8680	   ANSI.  (operand_equal_p tests for side effects.)  */
8681
8682	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8683	    && operand_equal_p (TREE_OPERAND (exp, 2),
8684				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8685	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8686	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8687		 && operand_equal_p (TREE_OPERAND (exp, 1),
8688				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8689	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8690	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8691		 && operand_equal_p (TREE_OPERAND (exp, 2),
8692				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8693	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8694	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8695		 && operand_equal_p (TREE_OPERAND (exp, 1),
8696				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8697	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8698
8699	/* If we are not to produce a result, we have no target.  Otherwise,
8700	   if a target was specified use it; it will not be used as an
8701	   intermediate target unless it is safe.  If no target, use a
8702	   temporary.  */
8703
8704	if (ignore)
8705	  temp = 0;
8706	else if (modifier == EXPAND_STACK_PARM)
8707	  temp = assign_temp (type, 0, 0, 1);
8708	else if (original_target
8709		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8710		     || (singleton && GET_CODE (original_target) == REG
8711			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8712			 && original_target == var_rtx (singleton)))
8713		 && GET_MODE (original_target) == mode
8714#ifdef HAVE_conditional_move
8715		 && (! can_conditionally_move_p (mode)
8716		     || GET_CODE (original_target) == REG
8717		     || TREE_ADDRESSABLE (type))
8718#endif
8719		 && (GET_CODE (original_target) != MEM
8720		     || TREE_ADDRESSABLE (type)))
8721	  temp = original_target;
8722	else if (TREE_ADDRESSABLE (type))
8723	  abort ();
8724	else
8725	  temp = assign_temp (type, 0, 0, 1);
8726
8727	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8728	   do the test of X as a store-flag operation, do this as
8729	   A + ((X != 0) << log C).  Similarly for other simple binary
8730	   operators.  When BRANCH_COST is low, do this only for C == 1.  */
8731	if (temp && singleton && binary_op
8732	    && (TREE_CODE (binary_op) == PLUS_EXPR
8733		|| TREE_CODE (binary_op) == MINUS_EXPR
8734		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8735		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8736	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8737		: integer_onep (TREE_OPERAND (binary_op, 1)))
8738	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8739	  {
8740	    rtx result;
8741	    tree cond;
8742	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8743			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8744			       ? addv_optab : add_optab)
8745			    : TREE_CODE (binary_op) == MINUS_EXPR
8746			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8747			       ? subv_optab : sub_optab)
8748			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8749			    : xor_optab);
8750
8751	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
8752	    if (singleton == TREE_OPERAND (exp, 1))
8753	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8754	    else
8755	      cond = TREE_OPERAND (exp, 0);
8756
8757	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8758					   ? temp : NULL_RTX),
8759				    mode, BRANCH_COST <= 1);
8760
8761	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8762	      result = expand_shift (LSHIFT_EXPR, mode, result,
8763				     build_int_2 (tree_log2
8764						  (TREE_OPERAND
8765						   (binary_op, 1)),
8766						  0),
8767				     (safe_from_p (temp, singleton, 1)
8768				      ? temp : NULL_RTX), 0);
8769
8770	    if (result)
8771	      {
8772		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8773		return expand_binop (mode, boptab, op1, result, temp,
8774				     unsignedp, OPTAB_LIB_WIDEN);
8775	      }
8776	  }
8777
8778	do_pending_stack_adjust ();
8779	NO_DEFER_POP;
8780	op0 = gen_label_rtx ();
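	/* OP0 labels the point we branch to when the condition selects the
	   other arm; OP1 will be the final join label (in the simpler cases
	   below the two coincide).  */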
8781
8782	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8783	  {
8784	    if (temp != 0)
8785	      {
8786		/* If the target conflicts with the other operand of the
8787		   binary op, we can't use it.  Also, we can't use the target
8788		   if it is a hard register, because evaluating the condition
8789		   might clobber it.  */
8790		if ((binary_op
8791		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8792		    || (GET_CODE (temp) == REG
8793			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8794		  temp = gen_reg_rtx (mode);
8795		store_expr (singleton, temp,
8796			    modifier == EXPAND_STACK_PARM ? 2 : 0);
8797	      }
8798	    else
8799	      expand_expr (singleton,
8800			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8801	    if (singleton == TREE_OPERAND (exp, 1))
8802	      jumpif (TREE_OPERAND (exp, 0), op0);
8803	    else
8804	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8805
8806	    start_cleanup_deferral ();
8807	    if (binary_op && temp == 0)
8808	      /* Just touch the other operand.  */
8809	      expand_expr (TREE_OPERAND (binary_op, 1),
8810			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8811	    else if (binary_op)
8812	      store_expr (build (TREE_CODE (binary_op), type,
8813				 make_tree (type, temp),
8814				 TREE_OPERAND (binary_op, 1)),
8815			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8816	    else
8817	      store_expr (build1 (TREE_CODE (unary_op), type,
8818				  make_tree (type, temp)),
8819			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8820	    op1 = op0;
8821	  }
8822	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8823	   comparison operator.  If we have one of these cases, set the
8824	   output to A, branch on A (cse will merge these two references),
8825	   then set the output to FOO.  */
8826	else if (temp
8827		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8828		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8829		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8830				     TREE_OPERAND (exp, 1), 0)
8831		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8832		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8833		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8834	  {
8835	    if (GET_CODE (temp) == REG
8836		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8837	      temp = gen_reg_rtx (mode);
8838	    store_expr (TREE_OPERAND (exp, 1), temp,
8839			modifier == EXPAND_STACK_PARM ? 2 : 0);
8840	    jumpif (TREE_OPERAND (exp, 0), op0);
8841
8842	    start_cleanup_deferral ();
8843	    store_expr (TREE_OPERAND (exp, 2), temp,
8844			modifier == EXPAND_STACK_PARM ? 2 : 0);
8845	    op1 = op0;
8846	  }
8847	else if (temp
8848		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8849		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8850		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8851				     TREE_OPERAND (exp, 2), 0)
8852		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8853		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8854		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8855	  {
8856	    if (GET_CODE (temp) == REG
8857		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8858	      temp = gen_reg_rtx (mode);
8859	    store_expr (TREE_OPERAND (exp, 2), temp,
8860			modifier == EXPAND_STACK_PARM ? 2 : 0);
8861	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8862
8863	    start_cleanup_deferral ();
8864	    store_expr (TREE_OPERAND (exp, 1), temp,
8865			modifier == EXPAND_STACK_PARM ? 2 : 0);
8866	    op1 = op0;
8867	  }
8868	else
8869	  {
8870	    op1 = gen_label_rtx ();
8871	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8872
8873	    start_cleanup_deferral ();
8874
8875	    /* One branch of the cond can be void if it never returns.  For
8876	       example, A ? throw : E.  */
8877	    if (temp != 0
8878		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8879	      store_expr (TREE_OPERAND (exp, 1), temp,
8880			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8881	    else
8882	      expand_expr (TREE_OPERAND (exp, 1),
8883			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8884	    end_cleanup_deferral ();
8885	    emit_queue ();
8886	    emit_jump_insn (gen_jump (op1));
8887	    emit_barrier ();
8888	    emit_label (op0);
8889	    start_cleanup_deferral ();
8890	    if (temp != 0
8891		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8892	      store_expr (TREE_OPERAND (exp, 2), temp,
8893			  modifier == EXPAND_STACK_PARM ? 2 : 0);
8894	    else
8895	      expand_expr (TREE_OPERAND (exp, 2),
8896			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8897	  }
8898
8899	end_cleanup_deferral ();
8900
8901	emit_queue ();
8902	emit_label (op1);
8903	OK_DEFER_POP;
8904
8905	return temp;
8906      }
8907
8908    case TARGET_EXPR:
8909      {
8910	/* Something needs to be initialized, but we didn't know
8911	   where that thing was when building the tree.  For example,
8912	   it could be the return value of a function, or a parameter
8913	   to a function which is laid out on the stack, or a temporary
8914	   variable which must be passed by reference.
8915
8916	   We guarantee that the expression will either be constructed
8917	   or copied into our original target.  */
8918
8919	tree slot = TREE_OPERAND (exp, 0);
8920	tree cleanups = NULL_TREE;
8921	tree exp1;
8922
8923	if (TREE_CODE (slot) != VAR_DECL)
8924	  abort ();
8925
8926	if (! ignore)
8927	  target = original_target;
8928
8929	/* Set this here so that if we get a target that refers to a
8930	   register variable that's already been used, put_reg_into_stack
8931	   knows that it should fix up those uses.  */
8932	TREE_USED (slot) = 1;
8933
8934	if (target == 0)
8935	  {
8936	    if (DECL_RTL_SET_P (slot))
8937	      {
8938		target = DECL_RTL (slot);
8939		/* If we have already expanded the slot, don't do
8940		   it again.  (mrs)  */
8941		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8942		  return target;
8943	      }
8944	    else
8945	      {
8946		target = assign_temp (type, 2, 0, 1);
8947		/* All temp slots at this level must not conflict.  */
8948		preserve_temp_slots (target);
8949		SET_DECL_RTL (slot, target);
8950		if (TREE_ADDRESSABLE (slot))
8951		  put_var_into_stack (slot, /*rescan=*/false);
8952
8953		/* Since SLOT is not known to the called function
8954		   to belong to its stack frame, we must build an explicit
8955		   cleanup.  This case occurs when we must build up a reference
8956		   to pass the reference as an argument.  In this case,
8957		   it is very likely that such a reference need not be
8958		   built here.  */
8959
8960		if (TREE_OPERAND (exp, 2) == 0)
8961		  TREE_OPERAND (exp, 2)
8962		    = (*lang_hooks.maybe_build_cleanup) (slot);
8963		cleanups = TREE_OPERAND (exp, 2);
8964	      }
8965	  }
8966	else
8967	  {
8968	    /* This case does occur, when expanding a parameter which
8969	       needs to be constructed on the stack.  The target
8970	       is the actual stack address that we want to initialize.
8971	       The function we call will perform the cleanup in this case.  */
8972
8973	    /* If we have already assigned it space, use that space,
8974	       not the target that we were passed in, as our target
8975	       parameter is only a hint.  */
8976	    if (DECL_RTL_SET_P (slot))
8977	      {
8978		target = DECL_RTL (slot);
8979		/* If we have already expanded the slot, don't do
8980		   it again.  (mrs)  */
8981		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8982		  return target;
8983	      }
8984	    else
8985	      {
8986		SET_DECL_RTL (slot, target);
8987		/* If we must have an addressable slot, then make sure that
8988		   the RTL that we just stored in slot is OK.  */
8989		if (TREE_ADDRESSABLE (slot))
8990		  put_var_into_stack (slot, /*rescan=*/true);
8991	      }
8992	  }
8993
8994	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8995	/* Mark it as expanded.  */
8996	TREE_OPERAND (exp, 1) = NULL_TREE;
8997
8998	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8999
9000	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9001
9002	return target;
9003      }
9004
9005    case INIT_EXPR:
9006      {
9007	tree lhs = TREE_OPERAND (exp, 0);
9008	tree rhs = TREE_OPERAND (exp, 1);
9009
9010	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9011	return temp;
9012      }
9013
9014    case MODIFY_EXPR:
9015      {
9016	/* If lhs is complex, expand calls in rhs before computing it.
9017	   That's so we don't compute a pointer and save it over a
9018	   call.  If lhs is simple, compute it first so we can give it
9019	   as a target if the rhs is just a call.  This avoids an
9020	   extra temp and copy and that prevents a partial-subsumption
9021	   which makes bad code.  Actually we could treat
9022	   component_ref's of vars like vars.  */
9023
9024	tree lhs = TREE_OPERAND (exp, 0);
9025	tree rhs = TREE_OPERAND (exp, 1);
9026
9027	temp = 0;
9028
9029	/* Check for |= or &= of a bitfield of size one into another bitfield
9030	   of size 1.  In this case, (unless we need the result of the
9031	   assignment) we can do this more efficiently with a
9032	   test followed by an assignment, if necessary.
9033
9034	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
9035	   things change so we do, this code should be enhanced to
9036	   support it.  */
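	/* For example, `x.f |= y.g' becomes `if (y.g) x.f = 1;' and
	   `x.f &= y.g' becomes `if (! y.g) x.f = 0;'.  */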
9037	if (ignore
9038	    && TREE_CODE (lhs) == COMPONENT_REF
9039	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
9040		|| TREE_CODE (rhs) == BIT_AND_EXPR)
9041	    && TREE_OPERAND (rhs, 0) == lhs
9042	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9043	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9044	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9045	  {
9046	    rtx label = gen_label_rtx ();
9047
9048	    do_jump (TREE_OPERAND (rhs, 1),
9049		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9050		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9051	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
9052					     (TREE_CODE (rhs) == BIT_IOR_EXPR
9053					      ? integer_one_node
9054					      : integer_zero_node)),
9055			       0, 0);
9056	    do_pending_stack_adjust ();
9057	    emit_label (label);
9058	    return const0_rtx;
9059	  }
9060
9061	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9062
9063	return temp;
9064      }
9065
9066    case RETURN_EXPR:
9067      if (!TREE_OPERAND (exp, 0))
9068	expand_null_return ();
9069      else
9070	expand_return (TREE_OPERAND (exp, 0));
9071      return const0_rtx;
9072
9073    case PREINCREMENT_EXPR:
9074    case PREDECREMENT_EXPR:
9075      return expand_increment (exp, 0, ignore);
9076
9077    case POSTINCREMENT_EXPR:
9078    case POSTDECREMENT_EXPR:
9079      /* Faster to treat as pre-increment if result is not used.  */
9080      return expand_increment (exp, ! ignore, ignore);
9081
9082    case ADDR_EXPR:
9083      if (modifier == EXPAND_STACK_PARM)
9084	target = 0;
9085      /* Are we taking the address of a nested function?  */
9086      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9087	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9088	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9089	  && ! TREE_STATIC (exp))
9090	{
9091	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
9092	  op0 = force_operand (op0, target);
9093	}
9094      /* If we are taking the address of something erroneous, just
9095	 return a zero.  */
9096      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9097	return const0_rtx;
9098      /* If we are taking the address of a constant and are at the
9099	 top level, we have to use output_constant_def since we can't
9100	 call force_const_mem at top level.  */
9101      else if (cfun == 0
9102	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9103		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9104		       == 'c')))
9105	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9106      else
9107	{
9108	  /* We make sure to pass const0_rtx down if we came in with
9109	     ignore set, to avoid doing the cleanups twice for something.  */
9110	  op0 = expand_expr (TREE_OPERAND (exp, 0),
9111			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
9112			     (modifier == EXPAND_INITIALIZER
9113			      ? modifier : EXPAND_CONST_ADDRESS));
9114
9115	  /* If we are going to ignore the result, OP0 will have been set
9116	     to const0_rtx, so just return it.  Don't get confused and
9117	     think we are taking the address of the constant.  */
9118	  if (ignore)
9119	    return op0;
9120
9121	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9122	     clever and returns a REG when given a MEM.  */
9123	  op0 = protect_from_queue (op0, 1);
9124
9125	  /* We would like the object in memory.  If it is a constant, we can
9126	     have it be statically allocated into memory.  For a non-constant,
9127	     we need to allocate some memory and store the value into it.  */
9128
9129	  if (CONSTANT_P (op0))
9130	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9131				   op0);
9132	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9133		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9134		   || GET_CODE (op0) == PARALLEL)
9135	    {
9136	      /* If the operand is a SAVE_EXPR, we can deal with this by
9137		 forcing the SAVE_EXPR into memory.  */
9138	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9139		{
9140		  put_var_into_stack (TREE_OPERAND (exp, 0),
9141				      /*rescan=*/true);
9142		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9143		}
9144	      else
9145		{
9146		  /* If this object is in a register, it can't be BLKmode.  */
9147		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9148		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
9149
9150		  if (GET_CODE (op0) == PARALLEL)
9151		    /* Handle calls that pass values in multiple
9152		       non-contiguous locations.  The Irix 6 ABI has examples
9153		       of this.  */
9154		    emit_group_store (memloc, op0,
9155				      int_size_in_bytes (inner_type));
9156		  else
9157		    emit_move_insn (memloc, op0);
9158
9159		  op0 = memloc;
9160		}
9161	    }
9162
9163	  if (GET_CODE (op0) != MEM)
9164	    abort ();
9165
9166	  mark_temp_addr_taken (op0);
9167	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9168	    {
9169	      op0 = XEXP (op0, 0);
9170#ifdef POINTERS_EXTEND_UNSIGNED
9171	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9172		  && mode == ptr_mode)
9173		op0 = convert_memory_address (ptr_mode, op0);
9174#endif
9175	      return op0;
9176	    }
9177
9178	  /* If OP0 is not aligned at least as much as the type requires, we
9179	     need to make a temporary, copy OP0 to it, and take the address of
9180	     the temporary.  We want to use the alignment of the type, not of
9181	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
9182	     the test for BLKmode means that can't happen.  The test for
9183	     BLKmode is because we never make mis-aligned MEMs with
9184	     non-BLKmode.
9185
9186	     We don't need to do this at all if the machine doesn't have
9187	     strict alignment.  */
9188	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9189	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9190		  > MEM_ALIGN (op0))
9191	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9192	    {
9193	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9194	      rtx new;
9195
9196	      if (TYPE_ALIGN_OK (inner_type))
9197		abort ();
9198
9199	      if (TREE_ADDRESSABLE (inner_type))
9200		{
9201		  /* We can't make a bitwise copy of this object, so fail.  */
9202		  error ("cannot take the address of an unaligned member");
9203		  return const0_rtx;
9204		}
9205
9206	      new = assign_stack_temp_for_type
9207		(TYPE_MODE (inner_type),
9208		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9209		 : int_size_in_bytes (inner_type),
9210		 1, build_qualified_type (inner_type,
9211					  (TYPE_QUALS (inner_type)
9212					   | TYPE_QUAL_CONST)));
9213
9214	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9215			       (modifier == EXPAND_STACK_PARM
9216				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9217
9218	      op0 = new;
9219	    }
9220
9221	  op0 = force_operand (XEXP (op0, 0), target);
9222	}
9223
9224      if (flag_force_addr
9225	  && GET_CODE (op0) != REG
9226	  && modifier != EXPAND_CONST_ADDRESS
9227	  && modifier != EXPAND_INITIALIZER
9228	  && modifier != EXPAND_SUM)
9229	op0 = force_reg (Pmode, op0);
9230
9231      if (GET_CODE (op0) == REG
9232	  && ! REG_USERVAR_P (op0))
9233	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9234
9235#ifdef POINTERS_EXTEND_UNSIGNED
9236      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9237	  && mode == ptr_mode)
9238	op0 = convert_memory_address (ptr_mode, op0);
9239#endif
9240
9241      return op0;
9242
9243    case ENTRY_VALUE_EXPR:
9244      abort ();
9245
9246    /* COMPLEX type for Extended Pascal & Fortran  */
9247    case COMPLEX_EXPR:
9248      {
9249	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9250	rtx insns;
9251
9252	/* Get the rtx code of the operands.  */
9253	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9254	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9255
9256	if (! target)
9257	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9258
9259	start_sequence ();
9260
9261	/* Move the real (op0) and imaginary (op1) parts to their location.  */
9262	emit_move_insn (gen_realpart (mode, target), op0);
9263	emit_move_insn (gen_imagpart (mode, target), op1);
9264
9265	insns = get_insns ();
9266	end_sequence ();
9267
9268	/* Complex construction should appear as a single unit.  */
9269	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9270	   each with a separate pseudo as destination.
9271	   It's not correct for flow to treat them as a unit.  */
9272	if (GET_CODE (target) != CONCAT)
9273	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9274	else
9275	  emit_insn (insns);
9276
9277	return target;
9278      }
9279
9280    case REALPART_EXPR:
9281      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9282      return gen_realpart (mode, op0);
9283
9284    case IMAGPART_EXPR:
9285      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9286      return gen_imagpart (mode, op0);
9287
9288    case CONJ_EXPR:
9289      {
9290	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9291	rtx imag_t;
9292	rtx insns;
9293
9294	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9295
9296	if (! target)
9297	  target = gen_reg_rtx (mode);
9298
9299	start_sequence ();
9300
9301	/* Store the realpart and the negated imagpart to target.  */
9302	emit_move_insn (gen_realpart (partmode, target),
9303			gen_realpart (partmode, op0));
9304
9305	imag_t = gen_imagpart (partmode, target);
9306	temp = expand_unop (partmode,
9307			    ! unsignedp && flag_trapv
9308			    && (GET_MODE_CLASS(partmode) == MODE_INT)
9309			    ? negv_optab : neg_optab,
9310			    gen_imagpart (partmode, op0), imag_t, 0);
9311	if (temp != imag_t)
9312	  emit_move_insn (imag_t, temp);
9313
9314	insns = get_insns ();
9315	end_sequence ();
9316
9317	/* Conjugate should appear as a single unit.
9318	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9319	   each with a separate pseudo as destination.
9320	   It's not correct for flow to treat them as a unit.  */
9321	if (GET_CODE (target) != CONCAT)
9322	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9323	else
9324	  emit_insn (insns);
9325
9326	return target;
9327      }
9328
9329    case TRY_CATCH_EXPR:
9330      {
9331	tree handler = TREE_OPERAND (exp, 1);
9332
9333	expand_eh_region_start ();
9334
9335	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9336
9337	expand_eh_region_end_cleanup (handler);
9338
9339	return op0;
9340      }
9341
9342    case TRY_FINALLY_EXPR:
9343      {
9344	tree try_block = TREE_OPERAND (exp, 0);
9345	tree finally_block = TREE_OPERAND (exp, 1);
9346
9347        if (!optimize || unsafe_for_reeval (finally_block) > 1)
9348	  {
9349	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9350	       is not sufficient, so we cannot safely expand the block
9351	       twice.  Instead we play games with GOTO_SUBROUTINE_EXPR
9352	       so that the block is expanded only once.  */
9353	    /* When not optimizing, we go ahead with this form since
9354	       (1) user breakpoints operate more predictably without
9355		   code duplication, and
9356	       (2) we're not running any of the global optimizers
9357	           that would explode in time/space with the highly
9358		   connected CFG created by the indirect branching.  */
9359
9360	    rtx finally_label = gen_label_rtx ();
9361	    rtx done_label = gen_label_rtx ();
9362	    rtx return_link = gen_reg_rtx (Pmode);
9363	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9364			          (tree) finally_label, (tree) return_link);
9365	    TREE_SIDE_EFFECTS (cleanup) = 1;
9366
9367	    /* Start a new binding layer that will keep track of all cleanup
9368	       actions to be performed.  */
9369	    expand_start_bindings (2);
9370	    target_temp_slot_level = temp_slot_level;
9371
9372	    expand_decl_cleanup (NULL_TREE, cleanup);
9373	    op0 = expand_expr (try_block, target, tmode, modifier);
9374
9375	    preserve_temp_slots (op0);
9376	    expand_end_bindings (NULL_TREE, 0, 0);
9377	    emit_jump (done_label);
9378	    emit_label (finally_label);
9379	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9380	    emit_indirect_jump (return_link);
9381	    emit_label (done_label);
9382	  }
9383	else
9384	  {
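	    /* FINALLY_BLOCK is safe to evaluate more than once, so just
	       register it as an ordinary cleanup; it will be expanded on
	       both the normal and the exception paths.  */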
9385	    expand_start_bindings (2);
9386	    target_temp_slot_level = temp_slot_level;
9387
9388	    expand_decl_cleanup (NULL_TREE, finally_block);
9389	    op0 = expand_expr (try_block, target, tmode, modifier);
9390
9391	    preserve_temp_slots (op0);
9392	    expand_end_bindings (NULL_TREE, 0, 0);
9393	  }
9394
9395	return op0;
9396      }
9397
9398    case GOTO_SUBROUTINE_EXPR:
9399      {
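	/* Record where to come back to in RETURN_LINK, jump to the
	   subroutine (the finally block emitted by TRY_FINALLY_EXPR above),
	   and lay down the label it will return through.  */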
9400	rtx subr = (rtx) TREE_OPERAND (exp, 0);
9401	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9402	rtx return_address = gen_label_rtx ();
9403	emit_move_insn (return_link,
9404			gen_rtx_LABEL_REF (Pmode, return_address));
9405	emit_jump (subr);
9406	emit_label (return_address);
9407	return const0_rtx;
9408      }
9409
9410    case VA_ARG_EXPR:
9411      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9412
9413    case EXC_PTR_EXPR:
9414      return get_exception_pointer (cfun);
9415
9416    case FDESC_EXPR:
9417      /* Function descriptors are not valid except for as
9418	 initialization constants, and should not be expanded.  */
9419      abort ();
9420
9421    default:
9422      return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9423    }
9424
9425  /* Here to do an ordinary binary operator, generating an instruction
9426     from the optab already placed in `this_optab'.  */
9427 binop:
9428  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9429    subtarget = 0;
9430  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9431  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9432 binop2:
9433  if (modifier == EXPAND_STACK_PARM)
9434    target = 0;
9435  temp = expand_binop (mode, this_optab, op0, op1, target,
9436		       unsignedp, OPTAB_LIB_WIDEN);
9437  if (temp == 0)
9438    abort ();
9439  return temp;
9440}
9441
9442/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9443   when applied to the address of EXP produces an address known to be
9444   aligned more than BIGGEST_ALIGNMENT.  */
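/* Specifically, we recognize OFFSET of the form (-&EXP) & C, possibly with
   intervening conversions, where C + 1 is a power of 2 larger than
   BIGGEST_ALIGNMENT -- the idiom used to round the address of EXP up to
   that alignment.  */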
9445
9446static int
9447is_aligning_offset (offset, exp)
9448     tree offset;
9449     tree exp;
9450{
9451  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
9452  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9453	 || TREE_CODE (offset) == NOP_EXPR
9454	 || TREE_CODE (offset) == CONVERT_EXPR
9455	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9456    offset = TREE_OPERAND (offset, 0);
9457
9458  /* We must now have a BIT_AND_EXPR with a constant that is one less than
9459     a power of 2 and larger than BIGGEST_ALIGNMENT.  */
9460  if (TREE_CODE (offset) != BIT_AND_EXPR
9461      || !host_integerp (TREE_OPERAND (offset, 1), 1)
9462      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9463      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9464    return 0;
9465
9466  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9467     It must be NEGATE_EXPR.  Then strip any more conversions.  */
9468  offset = TREE_OPERAND (offset, 0);
9469  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9470	 || TREE_CODE (offset) == NOP_EXPR
9471	 || TREE_CODE (offset) == CONVERT_EXPR)
9472    offset = TREE_OPERAND (offset, 0);
9473
9474  if (TREE_CODE (offset) != NEGATE_EXPR)
9475    return 0;
9476
9477  offset = TREE_OPERAND (offset, 0);
9478  while (TREE_CODE (offset) == NON_LVALUE_EXPR
9479	 || TREE_CODE (offset) == NOP_EXPR
9480	 || TREE_CODE (offset) == CONVERT_EXPR)
9481    offset = TREE_OPERAND (offset, 0);
9482
9483  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9484     whose type is the same as EXP.  */
9485  return (TREE_CODE (offset) == ADDR_EXPR
9486	  && (TREE_OPERAND (offset, 0) == exp
9487	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9488		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
9489		      == TREE_TYPE (exp)))));
9490}
9491
9492/* Return the tree node if an ARG corresponds to a string constant or zero
9493   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9494   in bytes within the string that ARG is accessing.  The type of the
9495   offset will be `sizetype'.  */
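/* For example, if ARG is `&"hello"[0]' we return the STRING_CST with
   *PTR_OFFSET set to 0, and if ARG is the equivalent of `"hello" + 3'
   (a PLUS_EXPR of the string's address and 3) the offset is 3.  */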
9496
9497tree
9498string_constant (arg, ptr_offset)
9499     tree arg;
9500     tree *ptr_offset;
9501{
9502  STRIP_NOPS (arg);
9503
9504  if (TREE_CODE (arg) == ADDR_EXPR
9505      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9506    {
9507      *ptr_offset = size_zero_node;
9508      return TREE_OPERAND (arg, 0);
9509    }
9510  else if (TREE_CODE (arg) == PLUS_EXPR)
9511    {
9512      tree arg0 = TREE_OPERAND (arg, 0);
9513      tree arg1 = TREE_OPERAND (arg, 1);
9514
9515      STRIP_NOPS (arg0);
9516      STRIP_NOPS (arg1);
9517
9518      if (TREE_CODE (arg0) == ADDR_EXPR
9519	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9520	{
9521	  *ptr_offset = convert (sizetype, arg1);
9522	  return TREE_OPERAND (arg0, 0);
9523	}
9524      else if (TREE_CODE (arg1) == ADDR_EXPR
9525	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9526	{
9527	  *ptr_offset = convert (sizetype, arg0);
9528	  return TREE_OPERAND (arg1, 0);
9529	}
9530    }
9531
9532  return 0;
9533}
9534
9535/* Expand code for a post- or pre- increment or decrement
9536   and return the RTX for the result.
9537   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
9538
9539static rtx
9540expand_increment (exp, post, ignore)
9541     tree exp;
9542     int post, ignore;
9543{
9544  rtx op0, op1;
9545  rtx temp, value;
9546  tree incremented = TREE_OPERAND (exp, 0);
9547  optab this_optab = add_optab;
9548  int icode;
9549  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9550  int op0_is_copy = 0;
9551  int single_insn = 0;
9552  /* 1 means we can't store into OP0 directly,
9553     because it is a subreg narrower than a word,
9554     and we don't dare clobber the rest of the word.  */
9555  int bad_subreg = 0;
9556
9557  /* Stabilize any component ref that might need to be
9558     evaluated more than once below.  */
9559  if (!post
9560      || TREE_CODE (incremented) == BIT_FIELD_REF
9561      || (TREE_CODE (incremented) == COMPONENT_REF
9562	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9563	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9564    incremented = stabilize_reference (incremented);
9565  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
9566     ones into save exprs so that they don't accidentally get evaluated
9567     more than once by the code below.  */
9568  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9569      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9570    incremented = save_expr (incremented);
9571
9572  /* Compute the operands as RTX.
9573     Note whether OP0 is the actual lvalue or a copy of it:
9574     I believe it is a copy iff it is a register or subreg
9575     and insns were generated in computing it.  */
9576
9577  temp = get_last_insn ();
9578  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9579
9580  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9581     in place but instead must do sign- or zero-extension during assignment,
9582     so we copy it into a new register and let the code below use it as
9583     a copy.
9584
9585     Note that we can safely modify this SUBREG since it is known not to be
9586     shared (it was made by the expand_expr call above).  */
9587
9588  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9589    {
9590      if (post)
9591	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9592      else
9593	bad_subreg = 1;
9594    }
9595  else if (GET_CODE (op0) == SUBREG
9596	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9597    {
9598      /* We cannot increment this SUBREG in place.  If we are
9599	 post-incrementing, get a copy of the old value.  Otherwise,
9600	 just mark that we cannot increment in place.  */
9601      if (post)
9602	op0 = copy_to_reg (op0);
9603      else
9604	bad_subreg = 1;
9605    }
9606
9607  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9608		 && temp != get_last_insn ());
9609  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9610
9611  /* Decide whether incrementing or decrementing.  */
9612  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9613      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9614    this_optab = sub_optab;
9615
9616  /* Convert decrement by a constant into a negative increment.  */
9617  if (this_optab == sub_optab
9618      && GET_CODE (op1) == CONST_INT)
9619    {
9620      op1 = GEN_INT (-INTVAL (op1));
9621      this_optab = add_optab;
9622    }
9623
9624  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9625    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9626
9627  /* For a preincrement, see if we can do this with a single instruction.  */
9628  if (!post)
9629    {
9630      icode = (int) this_optab->handlers[(int) mode].insn_code;
9631      if (icode != (int) CODE_FOR_nothing
9632	  /* Make sure that OP0 is valid for operands 0 and 1
9633	     of the insn we want to queue.  */
9634	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9635	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9636	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9637	single_insn = 1;
9638    }
9639
9640  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9641     then we cannot just increment OP0.  We must therefore contrive to
9642     increment the original value.  Then, for postincrement, we can return
9643     OP0 since it is a copy of the old value.  For preincrement, expand here
9644     unless we can do it with a single insn.
9645
9646     Likewise if storing directly into OP0 would clobber high bits
9647     we need to preserve (bad_subreg).  */
9648  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9649    {
9650      /* This is the easiest way to increment the value wherever it is.
9651	 Problems with multiple evaluation of INCREMENTED are prevented
9652	 because either (1) it is a component_ref or preincrement,
9653	 in which case it was stabilized above, or (2) it is an array_ref
9654	 with constant index in an array in a register, which is
9655	 safe to reevaluate.  */
9656      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9657			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9658			    ? MINUS_EXPR : PLUS_EXPR),
9659			   TREE_TYPE (exp),
9660			   incremented,
9661			   TREE_OPERAND (exp, 1));
9662
9663      while (TREE_CODE (incremented) == NOP_EXPR
9664	     || TREE_CODE (incremented) == CONVERT_EXPR)
9665	{
9666	  newexp = convert (TREE_TYPE (incremented), newexp);
9667	  incremented = TREE_OPERAND (incremented, 0);
9668	}
9669
9670      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9671      return post ? op0 : temp;
9672    }
9673
9674  if (post)
9675    {
9676      /* We have a true reference to the value in OP0.
9677	 If there is an insn to add or subtract in this mode, queue it.
9678	 Queueing the increment insn avoids the register shuffling
9679	 that often results if we must increment now and first save
9680	 the old value for subsequent use.  */
9681
9682#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9683      op0 = stabilize (op0);
9684#endif
9685
9686      icode = (int) this_optab->handlers[(int) mode].insn_code;
9687      if (icode != (int) CODE_FOR_nothing
9688	  /* Make sure that OP0 is valid for operands 0 and 1
9689	     of the insn we want to queue.  */
9690	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9691	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9692	{
9693	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9694	    op1 = force_reg (mode, op1);
9695
9696	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9697	}
9698      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9699	{
9700	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9701		      ? force_reg (Pmode, XEXP (op0, 0))
9702		      : copy_to_reg (XEXP (op0, 0)));
9703	  rtx temp, result;
9704
9705	  op0 = replace_equiv_address (op0, addr);
9706	  temp = force_reg (GET_MODE (op0), op0);
9707	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9708	    op1 = force_reg (mode, op1);
9709
9710	  /* The increment queue is LIFO, thus we have to `queue'
9711	     the instructions in reverse order.  */
9712	  enqueue_insn (op0, gen_move_insn (op0, temp));
9713	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9714	  return result;
9715	}
9716    }
9717
9718  /* Preincrement, or we can't increment with one simple insn.  */
9719  if (post)
9720    /* Save a copy of the value before inc or dec, to return it later.  */
9721    temp = value = copy_to_reg (op0);
9722  else
9723    /* Arrange to return the incremented value.  */
9724    /* Copy the rtx because expand_binop will protect from the queue,
9725       and the results of that would be invalid for us to return
9726       if our caller does emit_queue before using our result.  */
9727    temp = copy_rtx (value = op0);
9728
9729  /* Increment however we can.  */
9730  op1 = expand_binop (mode, this_optab, value, op1, op0,
9731		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9732
9733  /* Make sure the value is stored into OP0.  */
9734  if (op1 != op0)
9735    emit_move_insn (op0, op1);
9736
9737  return temp;
9738}
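/* Illustrative sketch only, not part of the compiler: for a simple
   post-increment such as `y = x++' on an int living in a register, and
   assuming no single add insn could be queued, the tail of the function
   above amounts to

	temp = copy_to_reg (op0);			  save the old value
	op1 = expand_binop (mode, add_optab, temp, const1_rtx,
			    op0, 0, OPTAB_LIB_WIDEN);	  op0 = temp + 1
	if (op1 != op0)
	  emit_move_insn (op0, op1);
	return temp;					  caller sees old value

   The names follow the locals above; add_optab stands in for THIS_OPTAB
   after the decrement-to-negative-increment conversion.  */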
9739
9740/* At the start of a function, record that we have no previously-pushed
9741   arguments waiting to be popped.  */
9742
9743void
9744init_pending_stack_adjust ()
9745{
9746  pending_stack_adjust = 0;
9747}
9748
9749/* When exiting from function, if safe, clear out any pending stack adjust
9750   so the adjustment won't get done.
9751
9752   Note, if the current function calls alloca, then it must have a
9753   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9754
9755void
9756clear_pending_stack_adjust ()
9757{
9758#ifdef EXIT_IGNORE_STACK
9759  if (optimize > 0
9760      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9761      && EXIT_IGNORE_STACK
9762      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9763      && ! flag_inline_functions)
9764    {
9765      stack_pointer_delta -= pending_stack_adjust;
9766      pending_stack_adjust = 0;
9767    }
9768#endif
9769}
9770
9771/* Pop any previously-pushed arguments that have not been popped yet.  */
9772
9773void
9774do_pending_stack_adjust ()
9775{
9776  if (inhibit_defer_pop == 0)
9777    {
9778      if (pending_stack_adjust != 0)
9779	adjust_stack (GEN_INT (pending_stack_adjust));
9780      pending_stack_adjust = 0;
9781    }
9782}
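/* Illustrative sketch only: with argument popping deferred, two calls in
   a row, say

	foo (a);
	bar (b);

   each leave their pushed arguments on the stack; the pops accumulate in
   pending_stack_adjust and are emitted here as a single adjust_stack of
   the combined size, rather than one stack adjustment per call.  */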
9783
9784/* Expand conditional expressions.  */
9785
9786/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9787   LABEL is an rtx of code CODE_LABEL, in this function and all the
9788   functions here.  */
9789
9790void
9791jumpifnot (exp, label)
9792     tree exp;
9793     rtx label;
9794{
9795  do_jump (exp, label, NULL_RTX);
9796}
9797
9798/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9799
9800void
9801jumpif (exp, label)
9802     tree exp;
9803     rtx label;
9804{
9805  do_jump (exp, NULL_RTX, label);
9806}
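/* Illustrative sketch only, showing a hypothetical caller: a front end
   expanding `if (COND) THEN-BODY' can use these entry points roughly as

	rtx end_label = gen_label_rtx ();
	jumpifnot (cond_tree, end_label);
	... expand THEN-BODY ...
	emit_label (end_label);

   leaving it to do_jump below to handle &&, || and comparisons inside
   COND without ever materializing a boolean value in a register.  */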
9807
9808/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9809   the result is zero, or IF_TRUE_LABEL if the result is one.
9810   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9811   meaning fall through in that case.
9812
9813   do_jump always does any pending stack adjust except when it does not
9814   actually perform a jump.  An example where there is no jump
9815   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9816
9817   This function is responsible for optimizing cases such as
9818   &&, || and comparison operators in EXP.  */
9819
9820void
9821do_jump (exp, if_false_label, if_true_label)
9822     tree exp;
9823     rtx if_false_label, if_true_label;
9824{
9825  enum tree_code code = TREE_CODE (exp);
9826  /* Some cases need to create a label to jump to
9827     in order to properly fall through.
9828     These cases set DROP_THROUGH_LABEL nonzero.  */
9829  rtx drop_through_label = 0;
9830  rtx temp;
9831  int i;
9832  tree type;
9833  enum machine_mode mode;
9834
9835#ifdef MAX_INTEGER_COMPUTATION_MODE
9836  check_max_integer_computation_mode (exp);
9837#endif
9838
9839  emit_queue ();
9840
9841  switch (code)
9842    {
9843    case ERROR_MARK:
9844      break;
9845
9846    case INTEGER_CST:
9847      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9848      if (temp)
9849	emit_jump (temp);
9850      break;
9851
9852#if 0
9853      /* This is not true with #pragma weak  */
9854    case ADDR_EXPR:
9855      /* The address of something can never be zero.  */
9856      if (if_true_label)
9857	emit_jump (if_true_label);
9858      break;
9859#endif
9860
9861    case UNSAVE_EXPR:
9862      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9863      TREE_OPERAND (exp, 0)
9864	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
9865      break;
9866
9867    case NOP_EXPR:
9868      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9869	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9870	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9871	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9872	goto normal;
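      /* Fall through.  */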
9873    case CONVERT_EXPR:
9874      /* If we are narrowing the operand, we have to do the compare in the
9875	 narrower mode.  */
9876      if ((TYPE_PRECISION (TREE_TYPE (exp))
9877	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9878	goto normal;
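      /* Fall through.  */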
9879    case NON_LVALUE_EXPR:
9880    case REFERENCE_EXPR:
9881    case ABS_EXPR:
9882    case NEGATE_EXPR:
9883    case LROTATE_EXPR:
9884    case RROTATE_EXPR:
9885      /* These cannot change zero->nonzero or vice versa.  */
9886      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9887      break;
9888
9889    case WITH_RECORD_EXPR:
9890      /* Put the object on the placeholder list, recurse through our first
9891	 operand, and pop the list.  */
9892      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9893				    placeholder_list);
9894      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9895      placeholder_list = TREE_CHAIN (placeholder_list);
9896      break;
9897
9898#if 0
9899      /* This is never less insns than evaluating the PLUS_EXPR followed by
9900	 a test and can be longer if the test is eliminated.  */
9901    case PLUS_EXPR:
9902      /* Reduce to minus.  */
9903      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9904		   TREE_OPERAND (exp, 0),
9905		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9906				 TREE_OPERAND (exp, 1))));
9907      /* Process as MINUS.  */
9908#endif
9909
9910    case MINUS_EXPR:
9911      /* Nonzero iff operands of minus differ.  */
9912      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9913				  TREE_OPERAND (exp, 0),
9914				  TREE_OPERAND (exp, 1)),
9915			   NE, NE, if_false_label, if_true_label);
9916      break;
9917
9918    case BIT_AND_EXPR:
9919      /* If we are AND'ing with a small constant, do this comparison in the
9920	 smallest type that fits.  If the machine doesn't have comparisons
9921	 that small, it will be converted back to the wider comparison.
9922	 This helps if we are testing the sign bit of a narrower object.
9923	 combine can't do this for us because it can't know whether a
9924	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
9925
9926      if (! SLOW_BYTE_ACCESS
9927	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9928	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9929	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9930	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9931	  && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9932	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9933	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9934	      != CODE_FOR_nothing))
9935	{
9936	  do_jump (convert (type, exp), if_false_label, if_true_label);
9937	  break;
9938	}
9939      goto normal;
9940
9941    case TRUTH_NOT_EXPR:
9942      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9943      break;
9944
9945    case TRUTH_ANDIF_EXPR:
9946      if (if_false_label == 0)
9947	if_false_label = drop_through_label = gen_label_rtx ();
9948      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9949      start_cleanup_deferral ();
9950      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9951      end_cleanup_deferral ();
9952      break;
9953
9954    case TRUTH_ORIF_EXPR:
9955      if (if_true_label == 0)
9956	if_true_label = drop_through_label = gen_label_rtx ();
9957      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9958      start_cleanup_deferral ();
9959      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9960      end_cleanup_deferral ();
9961      break;
9962
9963    case COMPOUND_EXPR:
9964      push_temp_slots ();
9965      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9966      preserve_temp_slots (NULL_RTX);
9967      free_temp_slots ();
9968      pop_temp_slots ();
9969      emit_queue ();
9970      do_pending_stack_adjust ();
9971      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9972      break;
9973
9974    case COMPONENT_REF:
9975    case BIT_FIELD_REF:
9976    case ARRAY_REF:
9977    case ARRAY_RANGE_REF:
9978      {
9979	HOST_WIDE_INT bitsize, bitpos;
9980	int unsignedp;
9981	enum machine_mode mode;
9982	tree type;
9983	tree offset;
9984	int volatilep = 0;
9985
9986	/* Get description of this reference.  We don't actually care
9987	   about the underlying object here.  */
9988	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9989			     &unsignedp, &volatilep);
9990
9991	type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9992	if (! SLOW_BYTE_ACCESS
9993	    && type != 0 && bitsize >= 0
9994	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9995	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9996		!= CODE_FOR_nothing))
9997	  {
9998	    do_jump (convert (type, exp), if_false_label, if_true_label);
9999	    break;
10000	  }
10001	goto normal;
10002      }
10003
10004    case COND_EXPR:
10005      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
10006      if (integer_onep (TREE_OPERAND (exp, 1))
10007	  && integer_zerop (TREE_OPERAND (exp, 2)))
10008	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10009
10010      else if (integer_zerop (TREE_OPERAND (exp, 1))
10011	       && integer_onep (TREE_OPERAND (exp, 2)))
10012	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10013
10014      else
10015	{
10016	  rtx label1 = gen_label_rtx ();
10017	  drop_through_label = gen_label_rtx ();
10018
10019	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10020
10021	  start_cleanup_deferral ();
10022	  /* Now the THEN-expression.  */
10023	  do_jump (TREE_OPERAND (exp, 1),
10024		   if_false_label ? if_false_label : drop_through_label,
10025		   if_true_label ? if_true_label : drop_through_label);
10026	  /* In case the do_jump just above never jumps.  */
10027	  do_pending_stack_adjust ();
10028	  emit_label (label1);
10029
10030	  /* Now the ELSE-expression.  */
10031	  do_jump (TREE_OPERAND (exp, 2),
10032		   if_false_label ? if_false_label : drop_through_label,
10033		   if_true_label ? if_true_label : drop_through_label);
10034	  end_cleanup_deferral ();
10035	}
10036      break;
10037
10038    case EQ_EXPR:
10039      {
10040	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10041
10042	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10043	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10044	  {
10045	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10046	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10047	    do_jump
10048	      (fold
10049	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10050		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10051				    fold (build1 (REALPART_EXPR,
10052						  TREE_TYPE (inner_type),
10053						  exp0)),
10054				    fold (build1 (REALPART_EXPR,
10055						  TREE_TYPE (inner_type),
10056						  exp1)))),
10057		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10058				    fold (build1 (IMAGPART_EXPR,
10059						  TREE_TYPE (inner_type),
10060						  exp0)),
10061				    fold (build1 (IMAGPART_EXPR,
10062						  TREE_TYPE (inner_type),
10063						  exp1)))))),
10064	       if_false_label, if_true_label);
10065	  }
10066
10067	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10068	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10069
10070	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10071		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
10072	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10073	else
10074	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
10075	break;
10076      }
10077
10078    case NE_EXPR:
10079      {
10080	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10081
10082	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10083	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10084	  {
10085	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10086	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10087	    do_jump
10088	      (fold
10089	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10090		       fold (build (NE_EXPR, TREE_TYPE (exp),
10091				    fold (build1 (REALPART_EXPR,
10092						  TREE_TYPE (inner_type),
10093						  exp0)),
10094				    fold (build1 (REALPART_EXPR,
10095						  TREE_TYPE (inner_type),
10096						  exp1)))),
10097		       fold (build (NE_EXPR, TREE_TYPE (exp),
10098				    fold (build1 (IMAGPART_EXPR,
10099						  TREE_TYPE (inner_type),
10100						  exp0)),
10101				    fold (build1 (IMAGPART_EXPR,
10102						  TREE_TYPE (inner_type),
10103						  exp1)))))),
10104	       if_false_label, if_true_label);
10105	  }
10106
10107	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10108	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10109
10110	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10111		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
10112	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10113	else
10114	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
10115	break;
10116      }
10117
10118    case LT_EXPR:
10119      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10120      if (GET_MODE_CLASS (mode) == MODE_INT
10121	  && ! can_compare_p (LT, mode, ccp_jump))
10122	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10123      else
10124	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
10125      break;
10126
10127    case LE_EXPR:
10128      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10129      if (GET_MODE_CLASS (mode) == MODE_INT
10130	  && ! can_compare_p (LE, mode, ccp_jump))
10131	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10132      else
10133	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
10134      break;
10135
10136    case GT_EXPR:
10137      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10138      if (GET_MODE_CLASS (mode) == MODE_INT
10139	  && ! can_compare_p (GT, mode, ccp_jump))
10140	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10141      else
10142	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
10143      break;
10144
10145    case GE_EXPR:
10146      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10147      if (GET_MODE_CLASS (mode) == MODE_INT
10148	  && ! can_compare_p (GE, mode, ccp_jump))
10149	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10150      else
10151	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10152      break;
10153
10154    case UNORDERED_EXPR:
10155    case ORDERED_EXPR:
10156      {
10157	enum rtx_code cmp, rcmp;
10158	int do_rev;
10159
10160	if (code == UNORDERED_EXPR)
10161	  cmp = UNORDERED, rcmp = ORDERED;
10162	else
10163	  cmp = ORDERED, rcmp = UNORDERED;
10164	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10165
10166	do_rev = 0;
10167	if (! can_compare_p (cmp, mode, ccp_jump)
10168	    && (can_compare_p (rcmp, mode, ccp_jump)
10169		/* If the target doesn't provide either UNORDERED or ORDERED
10170		   comparisons, canonicalize on UNORDERED for the library.  */
10171		|| rcmp == UNORDERED))
10172	  do_rev = 1;
10173
10174	if (! do_rev)
10175	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10176	else
10177	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10178      }
10179      break;
10180
10181    {
10182      enum rtx_code rcode1;
10183      enum tree_code tcode2;
10184
10185      case UNLT_EXPR:
10186	rcode1 = UNLT;
10187	tcode2 = LT_EXPR;
10188	goto unordered_bcc;
10189      case UNLE_EXPR:
10190	rcode1 = UNLE;
10191	tcode2 = LE_EXPR;
10192	goto unordered_bcc;
10193      case UNGT_EXPR:
10194	rcode1 = UNGT;
10195	tcode2 = GT_EXPR;
10196	goto unordered_bcc;
10197      case UNGE_EXPR:
10198	rcode1 = UNGE;
10199	tcode2 = GE_EXPR;
10200	goto unordered_bcc;
10201      case UNEQ_EXPR:
10202	rcode1 = UNEQ;
10203	tcode2 = EQ_EXPR;
10204	goto unordered_bcc;
10205
10206      unordered_bcc:
10207	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10208	if (can_compare_p (rcode1, mode, ccp_jump))
10209	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10210			       if_true_label);
10211	else
10212	  {
10213	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
10214	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
10215	    tree cmp0, cmp1;
10216
10217	    /* If the target doesn't support combined unordered
10218	       compares, decompose into UNORDERED + comparison.  */
10219	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10220	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10221	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10222	    do_jump (exp, if_false_label, if_true_label);
10223	  }
10224      }
10225      break;
10226
10227      /* Special case:
10228		__builtin_expect (<test>, 0)	and
10229		__builtin_expect (<test>, 1)
10230
10231	 We need to do this here, so that <test> is not converted to a SCC
10232	 operation on machines that use condition code registers and COMPARE
10233	 like the PowerPC, and then the jump is done based on whether the SCC
10234	 operation produced a 1 or 0.  */
10235    case CALL_EXPR:
10236      /* Check for a built-in function.  */
10237      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10238	{
10239	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10240	  tree arglist = TREE_OPERAND (exp, 1);
10241
10242	  if (TREE_CODE (fndecl) == FUNCTION_DECL
10243	      && DECL_BUILT_IN (fndecl)
10244	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10245	      && arglist != NULL_TREE
10246	      && TREE_CHAIN (arglist) != NULL_TREE)
10247	    {
10248	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10249						    if_true_label);
10250
10251	      if (seq != NULL_RTX)
10252		{
10253		  emit_insn (seq);
10254		  return;
10255		}
10256	    }
10257	}
10258      /* fall through and generate the normal code.  */
10259
10260    default:
10261    normal:
10262      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10263#if 0
10264      /* This is not needed any more and causes poor code since it causes
10265	 comparisons and tests from non-SI objects to have different code
10266	 sequences.  */
10267      /* Copy to register to avoid generating bad insns by cse
10268	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
10269      if (!cse_not_expected && GET_CODE (temp) == MEM)
10270	temp = copy_to_reg (temp);
10271#endif
10272      do_pending_stack_adjust ();
10273      /* Do any postincrements in the expression that was tested.  */
10274      emit_queue ();
10275
10276      if (GET_CODE (temp) == CONST_INT
10277	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10278	  || GET_CODE (temp) == LABEL_REF)
10279	{
10280	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10281	  if (target)
10282	    emit_jump (target);
10283	}
10284      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10285	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10286	/* Note swapping the labels gives us not-equal.  */
10287	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10288      else if (GET_MODE (temp) != VOIDmode)
10289	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10290				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10291				 GET_MODE (temp), NULL_RTX,
10292				 if_false_label, if_true_label);
10293      else
10294	abort ();
10295    }
10296
10297  if (drop_through_label)
10298    {
10299      /* If do_jump produces code that might be jumped around,
10300	 do any stack adjusts from that code, before the place
10301	 where control merges in.  */
10302      do_pending_stack_adjust ();
10303      emit_label (drop_through_label);
10304    }
10305}
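/* Illustrative sketch only: for a condition such as `a && b', the
   TRUTH_ANDIF_EXPR case above amounts to

	do_jump (a, if_false_label, NULL_RTX);		skip B if A is zero
	do_jump (b, if_false_label, if_true_label);

   (with a drop-through label created first if IF_FALSE_LABEL was null),
   so B is evaluated only when A is nonzero and no boolean value is ever
   stored in a register.  TRUTH_ORIF_EXPR is the mirror image.  */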
10306
10307/* Given a comparison expression EXP for values too wide to be compared
10308   with one insn, test the comparison and jump to the appropriate label.
10309   The code of EXP is ignored; we always test GT if SWAP is 0,
10310   and LT if SWAP is 1.  */
10311
10312static void
10313do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10314     tree exp;
10315     int swap;
10316     rtx if_false_label, if_true_label;
10317{
10318  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10319  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10320  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10321  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10322
10323  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10324}
10325
10326/* Compare OP0 with OP1, word at a time, in mode MODE.
10327   UNSIGNEDP says to do unsigned comparison.
10328   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
10329
10330void
10331do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10332     enum machine_mode mode;
10333     int unsignedp;
10334     rtx op0, op1;
10335     rtx if_false_label, if_true_label;
10336{
10337  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10338  rtx drop_through_label = 0;
10339  int i;
10340
10341  if (! if_true_label || ! if_false_label)
10342    drop_through_label = gen_label_rtx ();
10343  if (! if_true_label)
10344    if_true_label = drop_through_label;
10345  if (! if_false_label)
10346    if_false_label = drop_through_label;
10347
10348  /* Compare a word at a time, high order first.  */
10349  for (i = 0; i < nwords; i++)
10350    {
10351      rtx op0_word, op1_word;
10352
10353      if (WORDS_BIG_ENDIAN)
10354	{
10355	  op0_word = operand_subword_force (op0, i, mode);
10356	  op1_word = operand_subword_force (op1, i, mode);
10357	}
10358      else
10359	{
10360	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10361	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10362	}
10363
10364      /* All but high-order word must be compared as unsigned.  */
10365      do_compare_rtx_and_jump (op0_word, op1_word, GT,
10366			       (unsignedp || i > 0), word_mode, NULL_RTX,
10367			       NULL_RTX, if_true_label);
10368
10369      /* Consider lower words only if these are equal.  */
10370      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10371			       NULL_RTX, NULL_RTX, if_false_label);
10372    }
10373
10374  if (if_false_label)
10375    emit_jump (if_false_label);
10376  if (drop_through_label)
10377    emit_label (drop_through_label);
10378}
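/* Illustrative sketch only: for a signed DImode comparison on a 32-bit
   target, the word loop above emits, in effect,

	if (hi0 > hi1)				goto if_true;	signed compare
	if (hi0 != hi1)				goto if_false;
	if ((unsigned) lo0 > (unsigned) lo1)	goto if_true;
	goto if_false;

   where only the high-order word is compared with the signedness of the
   original comparison; all lower words are compared unsigned.  */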
10379
10380/* Given an EQ_EXPR expression EXP for values too wide to be compared
10381   with one insn, test the comparison and jump to the appropriate label.  */
10382
10383static void
10384do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10385     tree exp;
10386     rtx if_false_label, if_true_label;
10387{
10388  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10389  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10390  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10391  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10392  int i;
10393  rtx drop_through_label = 0;
10394
10395  if (! if_false_label)
10396    drop_through_label = if_false_label = gen_label_rtx ();
10397
10398  for (i = 0; i < nwords; i++)
10399    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10400			     operand_subword_force (op1, i, mode),
10401			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10402			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
10403
10404  if (if_true_label)
10405    emit_jump (if_true_label);
10406  if (drop_through_label)
10407    emit_label (drop_through_label);
10408}
10409
10410/* Jump according to whether OP0 is 0.
10411   We assume that OP0 has an integer mode that is too wide
10412   for the available compare insns.  */
10413
10414void
10415do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10416     rtx op0;
10417     rtx if_false_label, if_true_label;
10418{
10419  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10420  rtx part;
10421  int i;
10422  rtx drop_through_label = 0;
10423
10424  /* The fastest way of doing this comparison on almost any machine is to
10425     "or" all the words and compare the result.  If all have to be loaded
10426     from memory and this is a very wide item, it's possible this may
10427     be slower, but that's highly unlikely.  */
10428
10429  part = gen_reg_rtx (word_mode);
10430  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10431  for (i = 1; i < nwords && part != 0; i++)
10432    part = expand_binop (word_mode, ior_optab, part,
10433			 operand_subword_force (op0, i, GET_MODE (op0)),
10434			 part, 1, OPTAB_WIDEN);
10435
10436  if (part != 0)
10437    {
10438      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10439			       NULL_RTX, if_false_label, if_true_label);
10440
10441      return;
10442    }
10443
10444  /* If we couldn't do the "or" simply, do this with a series of compares.  */
10445  if (! if_false_label)
10446    drop_through_label = if_false_label = gen_label_rtx ();
10447
10448  for (i = 0; i < nwords; i++)
10449    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10450			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
10451			     if_false_label, NULL_RTX);
10452
10453  if (if_true_label)
10454    emit_jump (if_true_label);
10455
10456  if (drop_through_label)
10457    emit_label (drop_through_label);
10458}
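/* Illustrative sketch only: for a DImode value on a 32-bit target, the
   "or" strategy above reduces the test against zero to

	part = lo | hi;
	if (part == 0)	goto if_true;
	goto if_false;

   i.e. one word_mode IOR and a single compare; the word-by-word fallback
   loop is used only when the IOR cannot be expanded.  */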
10459
10460/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10461   (including code to compute the values to be compared)
10462   and set (CC0) according to the result.
10463   The decision as to signed or unsigned comparison must be made by the caller.
10464
10465   We force a stack adjustment unless there are currently
10466   things pushed on the stack that aren't yet used.
10467
10468   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10469   compared.  */
10470
10471rtx
10472compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10473     rtx op0, op1;
10474     enum rtx_code code;
10475     int unsignedp;
10476     enum machine_mode mode;
10477     rtx size;
10478{
10479  enum rtx_code ucode;
10480  rtx tem;
10481
10482  /* If one operand is constant, make it the second one.  Only do this
10483     if the other operand is not constant as well.  */
10484
10485  if (swap_commutative_operands_p (op0, op1))
10486    {
10487      tem = op0;
10488      op0 = op1;
10489      op1 = tem;
10490      code = swap_condition (code);
10491    }
10492
10493  if (flag_force_mem)
10494    {
10495      op0 = force_not_mem (op0);
10496      op1 = force_not_mem (op1);
10497    }
10498
10499  do_pending_stack_adjust ();
10500
10501  ucode = unsignedp ? unsigned_condition (code) : code;
10502  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10503    return tem;
10504
10505#if 0
10506  /* There's no need to do this now that combine.c can eliminate lots of
10507     sign extensions.  This can be less efficient in certain cases on other
10508     machines.  */
10509
10510  /* If this is a signed equality comparison, we can do it as an
10511     unsigned comparison since zero-extension is cheaper than sign
10512     extension and comparisons with zero are done as unsigned.  This is
10513     the case even on machines that can do fast sign extension, since
10514     zero-extension is easier to combine with other operations than
10515     sign-extension is.  If we are comparing against a constant, we must
10516     convert it to what it would look like unsigned.  */
10517  if ((code == EQ || code == NE) && ! unsignedp
10518      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10519    {
10520      if (GET_CODE (op1) == CONST_INT
10521	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10522	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10523      unsignedp = 1;
10524    }
10525#endif
10526
10527  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10528
10529#if HAVE_cc0
10530  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10531#else
10532  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10533#endif
10534}
10535
10536/* Like do_compare_and_jump but expects the values to compare as two rtx's.
10537   The decision as to signed or unsigned comparison must be made by the caller.
10538
10539   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10540   compared.  */
10541
10542void
10543do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10544			 if_false_label, if_true_label)
10545     rtx op0, op1;
10546     enum rtx_code code;
10547     int unsignedp;
10548     enum machine_mode mode;
10549     rtx size;
10550     rtx if_false_label, if_true_label;
10551{
10552  enum rtx_code ucode;
10553  rtx tem;
10554  int dummy_true_label = 0;
10555
10556  /* Reverse the comparison if that is safe and we want to jump if it is
10557     false.  */
10558  if (! if_true_label && ! FLOAT_MODE_P (mode))
10559    {
10560      if_true_label = if_false_label;
10561      if_false_label = 0;
10562      code = reverse_condition (code);
10563    }
10564
10565  /* If one operand is constant, make it the second one.  Only do this
10566     if the other operand is not constant as well.  */
10567
10568  if (swap_commutative_operands_p (op0, op1))
10569    {
10570      tem = op0;
10571      op0 = op1;
10572      op1 = tem;
10573      code = swap_condition (code);
10574    }
10575
10576  if (flag_force_mem)
10577    {
10578      op0 = force_not_mem (op0);
10579      op1 = force_not_mem (op1);
10580    }
10581
10582  do_pending_stack_adjust ();
10583
10584  ucode = unsignedp ? unsigned_condition (code) : code;
10585  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10586    {
10587      if (tem == const_true_rtx)
10588	{
10589	  if (if_true_label)
10590	    emit_jump (if_true_label);
10591	}
10592      else
10593	{
10594	  if (if_false_label)
10595	    emit_jump (if_false_label);
10596	}
10597      return;
10598    }
10599
10600#if 0
10601  /* There's no need to do this now that combine.c can eliminate lots of
10602     sign extensions.  This can be less efficient in certain cases on other
10603     machines.  */
10604
10605  /* If this is a signed equality comparison, we can do it as an
10606     unsigned comparison since zero-extension is cheaper than sign
10607     extension and comparisons with zero are done as unsigned.  This is
10608     the case even on machines that can do fast sign extension, since
10609     zero-extension is easier to combine with other operations than
10610     sign-extension is.  If we are comparing against a constant, we must
10611     convert it to what it would look like unsigned.  */
10612  if ((code == EQ || code == NE) && ! unsignedp
10613      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10614    {
10615      if (GET_CODE (op1) == CONST_INT
10616	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10617	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10618      unsignedp = 1;
10619    }
10620#endif
10621
10622  if (! if_true_label)
10623    {
10624      dummy_true_label = 1;
10625      if_true_label = gen_label_rtx ();
10626    }
10627
10628  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10629			   if_true_label);
10630
10631  if (if_false_label)
10632    emit_jump (if_false_label);
10633  if (dummy_true_label)
10634    emit_label (if_true_label);
10635}
10636
10637/* Generate code for a comparison expression EXP (including code to compute
10638   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10639   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
10640   generated code will drop through.
10641   SIGNED_CODE should be the rtx operation for this comparison for
10642   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10643
10644   We force a stack adjustment unless there are currently
10645   things pushed on the stack that aren't yet used.  */
10646
10647static void
10648do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10649		     if_true_label)
10650     tree exp;
10651     enum rtx_code signed_code, unsigned_code;
10652     rtx if_false_label, if_true_label;
10653{
10654  rtx op0, op1;
10655  tree type;
10656  enum machine_mode mode;
10657  int unsignedp;
10658  enum rtx_code code;
10659
10660  /* Don't crash if the comparison was erroneous.  */
10661  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10662  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10663    return;
10664
10665  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10666  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10667    return;
10668
10669  type = TREE_TYPE (TREE_OPERAND (exp, 0));
10670  mode = TYPE_MODE (type);
10671  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10672      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10673	  || (GET_MODE_BITSIZE (mode)
10674	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10675								      1)))))))
10676    {
10677      /* op0 might have been replaced by a promoted constant, in which
10678	 case the type of the second argument should be used.  */
10679      type = TREE_TYPE (TREE_OPERAND (exp, 1));
10680      mode = TYPE_MODE (type);
10681    }
10682  unsignedp = TREE_UNSIGNED (type);
10683  code = unsignedp ? unsigned_code : signed_code;
10684
10685#ifdef HAVE_canonicalize_funcptr_for_compare
10686  /* If function pointers need to be "canonicalized" before they can
10687     be reliably compared, then canonicalize them.  */
10688  if (HAVE_canonicalize_funcptr_for_compare
10689      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10690      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10691	  == FUNCTION_TYPE))
10692    {
10693      rtx new_op0 = gen_reg_rtx (mode);
10694
10695      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10696      op0 = new_op0;
10697    }
10698
10699  if (HAVE_canonicalize_funcptr_for_compare
10700      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10701      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10702	  == FUNCTION_TYPE))
10703    {
10704      rtx new_op1 = gen_reg_rtx (mode);
10705
10706      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10707      op1 = new_op1;
10708    }
10709#endif
10710
10711  /* Do any postincrements in the expression that was tested.  */
10712  emit_queue ();
10713
10714  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10715			   ((mode == BLKmode)
10716			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10717			   if_false_label, if_true_label);
10718}
10719
10720/* Generate code to calculate EXP using a store-flag instruction
10721   and return an rtx for the result.  EXP is either a comparison
10722   or a TRUTH_NOT_EXPR whose operand is a comparison.
10723
10724   If TARGET is nonzero, store the result there if convenient.
10725
10726   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10727   cheap.
10728
10729   Return zero if there is no suitable set-flag instruction
10730   available on this machine.
10731
10732   Once expand_expr has been called on the arguments of the comparison,
10733   we are committed to doing the store flag, since it is not safe to
10734   re-evaluate the expression.  We emit the store-flag insn by calling
10735   emit_store_flag, but only expand the arguments if we have a reason
10736   to believe that emit_store_flag will be successful.  If we think that
10737   it will, but it isn't, we have to simulate the store-flag with a
10738   set/jump/set sequence.  */
10739
10740static rtx
10741do_store_flag (exp, target, mode, only_cheap)
10742     tree exp;
10743     rtx target;
10744     enum machine_mode mode;
10745     int only_cheap;
10746{
10747  enum rtx_code code;
10748  tree arg0, arg1, type;
10749  tree tem;
10750  enum machine_mode operand_mode;
10751  int invert = 0;
10752  int unsignedp;
10753  rtx op0, op1;
10754  enum insn_code icode;
10755  rtx subtarget = target;
10756  rtx result, label;
10757
10758  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10759     result at the end.  We can't simply invert the test since it would
10760     have already been inverted if it were valid.  This case occurs for
10761     some floating-point comparisons.  */
10762
10763  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10764    invert = 1, exp = TREE_OPERAND (exp, 0);
10765
10766  arg0 = TREE_OPERAND (exp, 0);
10767  arg1 = TREE_OPERAND (exp, 1);
10768
10769  /* Don't crash if the comparison was erroneous.  */
10770  if (arg0 == error_mark_node || arg1 == error_mark_node)
10771    return const0_rtx;
10772
10773  type = TREE_TYPE (arg0);
10774  operand_mode = TYPE_MODE (type);
10775  unsignedp = TREE_UNSIGNED (type);
10776
10777  /* We won't bother with BLKmode store-flag operations because it would mean
10778     passing a lot of information to emit_store_flag.  */
10779  if (operand_mode == BLKmode)
10780    return 0;
10781
10782  /* We won't bother with store-flag operations involving function pointers
10783     when function pointers must be canonicalized before comparisons.  */
10784#ifdef HAVE_canonicalize_funcptr_for_compare
10785  if (HAVE_canonicalize_funcptr_for_compare
10786      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10787	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10788	       == FUNCTION_TYPE))
10789	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10790	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10791		  == FUNCTION_TYPE))))
10792    return 0;
10793#endif
10794
10795  STRIP_NOPS (arg0);
10796  STRIP_NOPS (arg1);
10797
10798  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10799     operation of some type.  Some comparisons against 1 and -1 can be
10800     converted to comparisons with zero.  Do so here so that the tests
10801     below will be aware that we have a comparison with zero.   These
10802     tests will not catch constants in the first operand, but constants
10803     are rarely passed as the first operand.  */
10804
10805  switch (TREE_CODE (exp))
10806    {
10807    case EQ_EXPR:
10808      code = EQ;
10809      break;
10810    case NE_EXPR:
10811      code = NE;
10812      break;
10813    case LT_EXPR:
10814      if (integer_onep (arg1))
10815	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10816      else
10817	code = unsignedp ? LTU : LT;
10818      break;
10819    case LE_EXPR:
10820      if (! unsignedp && integer_all_onesp (arg1))
10821	arg1 = integer_zero_node, code = LT;
10822      else
10823	code = unsignedp ? LEU : LE;
10824      break;
10825    case GT_EXPR:
10826      if (! unsignedp && integer_all_onesp (arg1))
10827	arg1 = integer_zero_node, code = GE;
10828      else
10829	code = unsignedp ? GTU : GT;
10830      break;
10831    case GE_EXPR:
10832      if (integer_onep (arg1))
10833	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10834      else
10835	code = unsignedp ? GEU : GE;
10836      break;
10837
10838    case UNORDERED_EXPR:
10839      code = UNORDERED;
10840      break;
10841    case ORDERED_EXPR:
10842      code = ORDERED;
10843      break;
10844    case UNLT_EXPR:
10845      code = UNLT;
10846      break;
10847    case UNLE_EXPR:
10848      code = UNLE;
10849      break;
10850    case UNGT_EXPR:
10851      code = UNGT;
10852      break;
10853    case UNGE_EXPR:
10854      code = UNGE;
10855      break;
10856    case UNEQ_EXPR:
10857      code = UNEQ;
10858      break;
10859
10860    default:
10861      abort ();
10862    }
10863
10864  /* Put a constant second.  */
10865  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10866    {
10867      tem = arg0; arg0 = arg1; arg1 = tem;
10868      code = swap_condition (code);
10869    }
10870
10871  /* If this is an equality or inequality test of a single bit, we can
10872     do this by shifting the bit being tested to the low-order bit and
10873     masking the result with the constant 1.  If the condition was EQ,
10874     we xor it with 1.  This does not require an scc insn and is faster
10875     than an scc insn even if we have it.  */
10876
10877  if ((code == NE || code == EQ)
10878      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10879      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10880    {
10881      tree inner = TREE_OPERAND (arg0, 0);
10882      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10883      int ops_unsignedp;
10884
10885      /* If INNER is a right shift by a constant and the shift count plus
10886	 BITNUM does not overflow, adjust BITNUM and INNER.  */
10887
10888      if (TREE_CODE (inner) == RSHIFT_EXPR
10889	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10890	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10891	  && bitnum < TYPE_PRECISION (type)
10892	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10893				   bitnum - TYPE_PRECISION (type)))
10894	{
10895	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10896	  inner = TREE_OPERAND (inner, 0);
10897	}
10898
10899      /* If we are going to be able to omit the AND below, we must do our
10900	 operations as unsigned.  If we must use the AND, we have a choice.
10901	 Normally unsigned is faster, but for some machines signed is.  */
10902      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10903#ifdef LOAD_EXTEND_OP
10904		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10905#else
10906		       : 1
10907#endif
10908		       );
10909
10910      if (! get_subtarget (subtarget)
10911	  || GET_MODE (subtarget) != operand_mode
10912	  || ! safe_from_p (subtarget, inner, 1))
10913	subtarget = 0;
10914
10915      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10916
10917      if (bitnum != 0)
10918	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10919			    size_int (bitnum), subtarget, ops_unsignedp);
10920
10921      if (GET_MODE (op0) != mode)
10922	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10923
10924      if ((code == EQ && ! invert) || (code == NE && invert))
10925	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10926			    ops_unsignedp, OPTAB_LIB_WIDEN);
10927
10928      /* Put the AND last so it can combine with more things.  */
10929      if (bitnum != TYPE_PRECISION (type) - 1)
10930	op0 = expand_and (mode, op0, const1_rtx, subtarget);
10931
10932      return op0;
10933    }
10934
10935  /* Now see if we are likely to be able to do this.  Return if not.  */
10936  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10937    return 0;
10938
10939  icode = setcc_gen_code[(int) code];
10940  if (icode == CODE_FOR_nothing
10941      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10942    {
10943      /* We can only do this if it is one of the special cases that
10944	 can be handled without an scc insn.  */
10945      if ((code == LT && integer_zerop (arg1))
10946	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10947	;
10948      else if (BRANCH_COST >= 0
10949	       && ! only_cheap && (code == NE || code == EQ)
10950	       && TREE_CODE (type) != REAL_TYPE
10951	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10952		    != CODE_FOR_nothing)
10953		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10954		       != CODE_FOR_nothing)))
10955	;
10956      else
10957	return 0;
10958    }
10959
10960  if (! get_subtarget (target)
10961      || GET_MODE (subtarget) != operand_mode
10962      || ! safe_from_p (subtarget, arg1, 1))
10963    subtarget = 0;
10964
10965  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10966  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10967
10968  if (target == 0)
10969    target = gen_reg_rtx (mode);
10970
10971  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10972     because, if emit_store_flag does anything at all, it will succeed and
10973     OP0 and OP1 will not be used subsequently.  */
10974
10975  result = emit_store_flag (target, code,
10976			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10977			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10978			    operand_mode, unsignedp, 1);
10979
10980  if (result)
10981    {
10982      if (invert)
10983	result = expand_binop (mode, xor_optab, result, const1_rtx,
10984			       result, 0, OPTAB_LIB_WIDEN);
10985      return result;
10986    }
10987
10988  /* If this failed, we have to do this with set/compare/jump/set code.  */
10989  if (GET_CODE (target) != REG
10990      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10991    target = gen_reg_rtx (GET_MODE (target));
10992
10993  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10994  result = compare_from_rtx (op0, op1, code, unsignedp,
10995			     operand_mode, NULL_RTX);
10996  if (GET_CODE (result) == CONST_INT)
10997    return (((result == const0_rtx && ! invert)
10998	     || (result != const0_rtx && invert))
10999	    ? const0_rtx : const1_rtx);
11000
11001  /* The code of RESULT may not match CODE if compare_from_rtx
11002     decided to swap its operands and reverse the original code.
11003
11004     We know that compare_from_rtx returns either a CONST_INT or
11005     a new comparison code, so it is safe to just extract the
11006     code from RESULT.  */
11007  code = GET_CODE (result);
11008
11009  label = gen_label_rtx ();
11010  if (bcc_gen_fctn[(int) code] == 0)
11011    abort ();
11012
11013  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11014  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11015  emit_label (label);
11016
11017  return target;
11018}
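/* Illustrative sketch only: for the single-bit test described in
   do_store_flag above, a store-flag of

	(x & 8) != 0	becomes roughly		(x >> 3) & 1
	(x & 8) == 0	becomes roughly		((x >> 3) & 1) ^ 1

   and when the bit tested is the sign bit the trailing AND is dropped,
   since the shift is then done unsigned and already leaves 0 or 1.  */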
11019
11020
11021/* Stubs in case we haven't got a casesi insn.  */
11022#ifndef HAVE_casesi
11023# define HAVE_casesi 0
11024# define gen_casesi(a, b, c, d, e) (0)
11025# define CODE_FOR_casesi CODE_FOR_nothing
11026#endif
11027
11028/* If the machine does not have a case insn that compares the bounds,
11029   this means extra overhead for dispatch tables, which raises the
11030   threshold for using them.  */
11031#ifndef CASE_VALUES_THRESHOLD
11032#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
11033#endif /* CASE_VALUES_THRESHOLD */
11034
11035unsigned int
11036case_values_threshold ()
11037{
11038  return CASE_VALUES_THRESHOLD;
11039}
11040
11041/* Attempt to generate a casesi instruction.  Returns 1 if successful,
11042   0 otherwise (i.e. if there is no casesi instruction).  */
11043int
11044try_casesi (index_type, index_expr, minval, range,
11045	    table_label, default_label)
11046     tree index_type, index_expr, minval, range;
11047     rtx table_label ATTRIBUTE_UNUSED;
11048     rtx default_label;
11049{
11050  enum machine_mode index_mode = SImode;
11051  int index_bits = GET_MODE_BITSIZE (index_mode);
11052  rtx op1, op2, index;
11053  enum machine_mode op_mode;
11054
11055  if (! HAVE_casesi)
11056    return 0;
11057
11058  /* Convert the index to SImode.  */
11059  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11060    {
11061      enum machine_mode omode = TYPE_MODE (index_type);
11062      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
11063
11064      /* We must handle the endpoints in the original mode.  */
11065      index_expr = build (MINUS_EXPR, index_type,
11066			  index_expr, minval);
11067      minval = integer_zero_node;
11068      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11069      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11070			       omode, 1, default_label);
11071      /* Now we can safely truncate.  */
11072      index = convert_to_mode (index_mode, index, 0);
11073    }
11074  else
11075    {
11076      if (TYPE_MODE (index_type) != index_mode)
11077	{
11078	  index_expr = convert ((*lang_hooks.types.type_for_size)
11079				(index_bits, 0), index_expr);
11080	  index_type = TREE_TYPE (index_expr);
11081	}
11082
11083      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11084    }
11085  emit_queue ();
11086  index = protect_from_queue (index, 0);
11087  do_pending_stack_adjust ();
11088
11089  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
11090  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
11091      (index, op_mode))
11092    index = copy_to_mode_reg (op_mode, index);
11093
11094  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
11095
11096  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
11097  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
11098		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
11099  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
11100      (op1, op_mode))
11101    op1 = copy_to_mode_reg (op_mode, op1);
11102
11103  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
11104
11105  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
11106  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
11107		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
11108  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
11109      (op2, op_mode))
11110    op2 = copy_to_mode_reg (op_mode, op2);
11111
11112  emit_jump_insn (gen_casesi (index, op1, op2,
11113			      table_label, default_label));
11114  return 1;
11115}
11116
11117/* Attempt to generate a tablejump instruction; same concept.  */
11118#ifndef HAVE_tablejump
11119#define HAVE_tablejump 0
11120#define gen_tablejump(x, y) (0)
11121#endif
11122
11123/* Subroutine of the next function.
11124
11125   INDEX is the value being switched on, with the lowest value
11126   in the table already subtracted.
11127   MODE is its expected mode (needed if INDEX is constant).
11128   RANGE is the length of the jump table.
11129   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11130
11131   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11132   index value is out of range.  */
11133
11134static void
11135do_tablejump (index, mode, range, table_label, default_label)
11136     rtx index, range, table_label, default_label;
11137     enum machine_mode mode;
11138{
11139  rtx temp, vector;
11140
11141  if (INTVAL (range) > cfun->max_jumptable_ents)
11142    cfun->max_jumptable_ents = INTVAL (range);
11143
11144  /* Do an unsigned comparison (in the proper mode) between the index
11145     expression and the value which represents the length of the range.
11146     Since we just finished subtracting the lower bound of the range
11147     from the index expression, this comparison allows us to simultaneously
11148     check that the original index expression value is both greater than
11149     or equal to the minimum value of the range and less than or equal to
11150     the maximum value of the range.  */
11151
11152  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11153			   default_label);
11154
11155  /* If index is in range, it must fit in Pmode.
11156     Convert to Pmode so we can index with it.  */
11157  if (mode != Pmode)
11158    index = convert_to_mode (Pmode, index, 1);
11159
11160  /* Don't let a MEM slip thru, because then INDEX that comes
11161     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11162     and break_out_memory_refs will go to work on it and mess it up.  */
11163#ifdef PIC_CASE_VECTOR_ADDRESS
11164  if (flag_pic && GET_CODE (index) != REG)
11165    index = copy_to_mode_reg (Pmode, index);
11166#endif
11167
11168  /* If flag_force_addr were to affect this address
11169     it could interfere with the tricky assumptions made
11170     about addresses that contain label-refs,
11171     which may be valid only very near the tablejump itself.  */
11172  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11173     GET_MODE_SIZE, because this indicates how large insns are.  The other
11174     uses should all be Pmode, because they are addresses.  This code
11175     could fail if addresses and insns are not the same size.  */
11176  index = gen_rtx_PLUS (Pmode,
11177			gen_rtx_MULT (Pmode, index,
11178				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11179			gen_rtx_LABEL_REF (Pmode, table_label));
11180#ifdef PIC_CASE_VECTOR_ADDRESS
11181  if (flag_pic)
11182    index = PIC_CASE_VECTOR_ADDRESS (index);
11183  else
11184#endif
11185    index = memory_address_noforce (CASE_VECTOR_MODE, index);
11186  temp = gen_reg_rtx (CASE_VECTOR_MODE);
11187  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11188  RTX_UNCHANGING_P (vector) = 1;
11189  MEM_NOTRAP_P (vector) = 1;
11190  convert_move (temp, vector, 0);
11191
11192  emit_jump_insn (gen_tablejump (temp, table_label));
11193
11194  /* If we are generating PIC code or if the table is PC-relative, the
11195     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11196  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11197    emit_barrier ();
11198}
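/* Illustrative sketch only, assuming Pmode is SImode and 4-byte case
   vector entries: the dispatch address built above is essentially

	(plus:SI (mult:SI index (const_int 4))
		 (label_ref table_label))

   which is loaded through a MEM of CASE_VECTOR_MODE and jumped through;
   with -fpic the whole address is first rewritten by
   PIC_CASE_VECTOR_ADDRESS.  */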
11199
11200int
11201try_tablejump (index_type, index_expr, minval, range,
11202	       table_label, default_label)
11203     tree index_type, index_expr, minval, range;
11204     rtx table_label, default_label;
11205{
11206  rtx index;
11207
11208  if (! HAVE_tablejump)
11209    return 0;
11210
11211  index_expr = fold (build (MINUS_EXPR, index_type,
11212			    convert (index_type, index_expr),
11213			    convert (index_type, minval)));
11214  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11215  emit_queue ();
11216  index = protect_from_queue (index, 0);
11217  do_pending_stack_adjust ();
11218
11219  do_tablejump (index, TYPE_MODE (index_type),
11220		convert_modes (TYPE_MODE (index_type),
11221			       TYPE_MODE (TREE_TYPE (range)),
11222			       expand_expr (range, NULL_RTX,
11223					    VOIDmode, 0),
11224			       TREE_UNSIGNED (TREE_TYPE (range))),
11225		table_label, default_label);
11226  return 1;
11227}
11228
11229/* Nonzero if the mode is a valid vector mode for this architecture.
11230   This returns nonzero even if there is no hardware support for the
11231   vector mode, but we can emulate with narrower modes.  */
11232
11233int
11234vector_mode_valid_p (mode)
11235     enum machine_mode mode;
11236{
11237  enum mode_class class = GET_MODE_CLASS (mode);
11238  enum machine_mode innermode;
11239
11240  /* Doh!  What's going on?  */
11241  if (class != MODE_VECTOR_INT
11242      && class != MODE_VECTOR_FLOAT)
11243    return 0;
11244
11245  /* Hardware support.  Woo hoo!  */
11246  if (VECTOR_MODE_SUPPORTED_P (mode))
11247    return 1;
11248
11249  innermode = GET_MODE_INNER (mode);
11250
11251  /* We should probably return 1 if requesting V4DI and we have no DI
11252     but do have V2DI; however, that case is probably very unlikely.  */
11253
11254  /* If we have support for the inner mode, we can safely emulate it.
11255     We may not have V2DI, but we can emulate with a pair of DIs.  */
11256  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11257}
11258
11259/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11260static rtx
11261const_vector_from_tree (exp)
11262     tree exp;
11263{
11264  rtvec v;
11265  int units, i;
11266  tree link, elt;
11267  enum machine_mode inner, mode;
11268
11269  mode = TYPE_MODE (TREE_TYPE (exp));
11270
11271  if (is_zeros_p (exp))
11272    return CONST0_RTX (mode);
11273
11274  units = GET_MODE_NUNITS (mode);
11275  inner = GET_MODE_INNER (mode);
11276
11277  v = rtvec_alloc (units);
11278
11279  link = TREE_VECTOR_CST_ELTS (exp);
11280  for (i = 0; link; link = TREE_CHAIN (link), ++i)
11281    {
11282      elt = TREE_VALUE (link);
11283
11284      if (TREE_CODE (elt) == REAL_CST)
11285	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11286							 inner);
11287      else
11288	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
11289					       TREE_INT_CST_HIGH (elt),
11290					       inner);
11291    }
11292
11293  return gen_rtx_raw_CONST_VECTOR (mode, v);
11294}
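/* Illustrative sketch only: a V4SImode VECTOR_CST such as {1, 2, 3, 4}
   comes back from the function above as

	(const_vector:V4SI [(const_int 1) (const_int 2)
			    (const_int 3) (const_int 4)])

   with REAL_CST elements represented as CONST_DOUBLEs instead.  */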
11295
11296#include "gt-expr.h"
11297