expr.c revision 90075
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "rtl.h"
26#include "tree.h"
27#include "obstack.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should be processed from last to first if the stack and args
53   grow in opposite directions, but only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
59#endif
60
61#endif
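/* For instance, on a target where the stack grows downward while argument
   offsets grow upward, and push insns exist, PUSH_ARGS_REVERSED is
   defined and arguments are therefore evaluated and pushed from last to
   first.  */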
62
63#ifndef STACK_PUSH_CODE
64#ifdef STACK_GROWS_DOWNWARD
65#define STACK_PUSH_CODE PRE_DEC
66#else
67#define STACK_PUSH_CODE PRE_INC
68#endif
69#endif
70
71/* Assume that case vectors are not pc-relative.  */
72#ifndef CASE_VECTOR_PC_RELATIVE
73#define CASE_VECTOR_PC_RELATIVE 0
74#endif
75
76/* If this is nonzero, we do not bother generating VOLATILE
77   around volatile memory references, and we are willing to
78   output indirect addresses.  If cse is to follow, we reject
79   indirect addresses so a useful potential cse is generated;
80   if it is used only once, instruction combination will produce
81   the same indirect address eventually.  */
82int cse_not_expected;
83
84/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
85static tree placeholder_list = 0;
86
87/* This structure is used by move_by_pieces to describe the move to
88   be performed.  */
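/* TO and FROM are the destination and source MEMs (TO is null when the
   data is being pushed onto the stack), and TO_ADDR and FROM_ADDR are
   their addresses.  AUTINC_TO and AUTINC_FROM are nonzero when the
   corresponding address is an autoincrement expression; EXPLICIT_INC_TO
   and EXPLICIT_INC_FROM are +1 or -1 when explicit add insns are emitted
   instead.  LEN is the number of bytes left to move, OFFSET the current
   byte offset, and REVERSE is nonzero when moving from the end of the
   block backward.  */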
89struct move_by_pieces
90{
91  rtx to;
92  rtx to_addr;
93  int autinc_to;
94  int explicit_inc_to;
95  rtx from;
96  rtx from_addr;
97  int autinc_from;
98  int explicit_inc_from;
99  unsigned HOST_WIDE_INT len;
100  HOST_WIDE_INT offset;
101  int reverse;
102};
103
104/* This structure is used by store_by_pieces to describe the clear to
105   be performed.  */
106
107struct store_by_pieces
108{
109  rtx to;
110  rtx to_addr;
111  int autinc_to;
112  int explicit_inc_to;
113  unsigned HOST_WIDE_INT len;
114  HOST_WIDE_INT offset;
115  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116  PTR constfundata;
117  int reverse;
118};
119
120extern struct obstack permanent_obstack;
121
122static rtx enqueue_insn		PARAMS ((rtx, rtx));
123static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124				PARAMS ((unsigned HOST_WIDE_INT,
125					 unsigned int));
126static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127					 struct move_by_pieces *));
128static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
129					 enum machine_mode));
130static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
131					 unsigned int));
132static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
133					 unsigned int));
134static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
135					 enum machine_mode,
136					 struct store_by_pieces *));
137static rtx get_subtarget	PARAMS ((rtx));
138static int is_zeros_p		PARAMS ((tree));
139static int mostly_zeros_p	PARAMS ((tree));
140static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141					     HOST_WIDE_INT, enum machine_mode,
142					     tree, tree, int, int));
143static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
145					 HOST_WIDE_INT, enum machine_mode,
146					 tree, enum machine_mode, int, tree,
147					 int));
148static rtx var_rtx		PARAMS ((tree));
149static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150static rtx expand_increment	PARAMS ((tree, int, int));
151static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
154					 rtx, rtx));
155static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
156#ifdef PUSH_ROUNDING
157static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
158#endif
159static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
160
161/* Record for each mode whether we can move a register directly to or
162   from an object of that mode in memory.  If we can't, we won't try
163   to use that mode directly when accessing a field of that mode.  */
164
165static char direct_load[NUM_MACHINE_MODES];
166static char direct_store[NUM_MACHINE_MODES];
167
168/* If a memory-to-memory move would take MOVE_RATIO or more simple
169   move-instruction sequences, we will do a movstr or libcall instead.  */
170
171#ifndef MOVE_RATIO
172#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173#define MOVE_RATIO 2
174#else
175/* If we are optimizing for space (-Os), cut down the default move ratio.  */
176#define MOVE_RATIO (optimize_size ? 3 : 15)
177#endif
178#endif
179
180/* This macro is used to determine whether move_by_pieces should be called
181   to perform a structure copy.  */
182#ifndef MOVE_BY_PIECES_P
183#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185#endif
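/* As an example, on a 32-bit target with MOVE_MAX == 4, a 16-byte
   word-aligned copy costs four SImode moves; four is below the default
   MOVE_RATIO of 15 (used when no movstrM patterns exist and we are not
   optimizing for size), so the copy is expanded inline by move_by_pieces
   rather than through a movstr pattern or a library call.  */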
186
187/* This array records the insn_code of insns to perform block moves.  */
188enum insn_code movstr_optab[NUM_MACHINE_MODES];
189
190/* This array records the insn_code of insns to perform block clears.  */
191enum insn_code clrstr_optab[NUM_MACHINE_MODES];
192
193/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */
194
195#ifndef SLOW_UNALIGNED_ACCESS
196#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
197#endif
198
199/* This is run once per compilation to set up which modes can be used
200   directly in memory and to initialize the block move optab.  */
201
202void
203init_expr_once ()
204{
205  rtx insn, pat;
206  enum machine_mode mode;
207  int num_clobbers;
208  rtx mem, mem1;
209
210  start_sequence ();
211
212  /* Try indexing by frame ptr and try by stack ptr.
213     It is known that on the Convex the stack ptr isn't a valid index.
214     With luck, one or the other is valid on any machine.  */
215  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
217
218  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219  pat = PATTERN (insn);
220
221  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222       mode = (enum machine_mode) ((int) mode + 1))
223    {
224      int regno;
225      rtx reg;
226
227      direct_load[(int) mode] = direct_store[(int) mode] = 0;
228      PUT_MODE (mem, mode);
229      PUT_MODE (mem1, mode);
230
231      /* See if there is some register that can be used in this mode and
232	 directly loaded or stored from memory.  */
233
234      if (mode != VOIDmode && mode != BLKmode)
235	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
237	     regno++)
238	  {
239	    if (! HARD_REGNO_MODE_OK (regno, mode))
240	      continue;
241
242	    reg = gen_rtx_REG (mode, regno);
243
244	    SET_SRC (pat) = mem;
245	    SET_DEST (pat) = reg;
246	    if (recog (pat, insn, &num_clobbers) >= 0)
247	      direct_load[(int) mode] = 1;
248
249	    SET_SRC (pat) = mem1;
250	    SET_DEST (pat) = reg;
251	    if (recog (pat, insn, &num_clobbers) >= 0)
252	      direct_load[(int) mode] = 1;
253
254	    SET_SRC (pat) = reg;
255	    SET_DEST (pat) = mem;
256	    if (recog (pat, insn, &num_clobbers) >= 0)
257	      direct_store[(int) mode] = 1;
258
259	    SET_SRC (pat) = reg;
260	    SET_DEST (pat) = mem1;
261	    if (recog (pat, insn, &num_clobbers) >= 0)
262	      direct_store[(int) mode] = 1;
263	  }
264    }
265
266  end_sequence ();
267}
268
269/* This is run at the start of compiling a function.  */
270
271void
272init_expr ()
273{
274  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
275
276  pending_chain = 0;
277  pending_stack_adjust = 0;
278  stack_pointer_delta = 0;
279  inhibit_defer_pop = 0;
280  saveregs_value = 0;
281  apply_args_value = 0;
282  forced_labels = 0;
283}
284
285void
286mark_expr_status (p)
287     struct expr_status *p;
288{
289  if (p == NULL)
290    return;
291
292  ggc_mark_rtx (p->x_saveregs_value);
293  ggc_mark_rtx (p->x_apply_args_value);
294  ggc_mark_rtx (p->x_forced_labels);
295}
296
297void
298free_expr_status (f)
299     struct function *f;
300{
301  free (f->expr);
302  f->expr = NULL;
303}
304
305/* Small sanity check that the queue is empty at the end of a function.  */
306
307void
308finish_expr_for_function ()
309{
310  if (pending_chain)
311    abort ();
312}
313
314/* Manage the queue of increment instructions to be output
315   for POSTINCREMENT_EXPR expressions, etc.  */
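/* A typical expansion sequence might look like (TO and FROM being
   arbitrary operand rtx's):

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);
	emit_move_insn (to, from);
	emit_queue ();

   Each operand is protected immediately before being placed in an insn,
   and the queued increments are flushed afterwards.  */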
316
317/* Queue up to increment (or change) VAR later.  BODY says how:
318   BODY should be the same thing you would pass to emit_insn
319   to increment right away.  It will go to emit_insn later on.
320
321   The value is a QUEUED expression to be used in place of VAR
322   where you want to guarantee the pre-incrementation value of VAR.  */
323
324static rtx
325enqueue_insn (var, body)
326     rtx var, body;
327{
328  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329				  body, pending_chain);
330  return pending_chain;
331}
332
333/* Use protect_from_queue to convert a QUEUED expression
334   into something that you can put immediately into an instruction.
335   If the queued incrementation has not happened yet,
336   protect_from_queue returns the variable itself.
337   If the incrementation has happened, protect_from_queue returns a temp
338   that contains a copy of the old value of the variable.
339
340   Any time an rtx which might possibly be a QUEUED is to be put
341   into an instruction, it must be passed through protect_from_queue first.
342   QUEUED expressions are not meaningful in instructions.
343
344   Do not pass a value through protect_from_queue and then hold
345   on to it for a while before putting it in an instruction!
346   If the queue is flushed in between, incorrect code will result.  */
347
348rtx
349protect_from_queue (x, modify)
350     rtx x;
351     int modify;
352{
353  RTX_CODE code = GET_CODE (x);
354
355#if 0  /* A QUEUED can hang around after the queue is forced out.  */
356  /* Shortcut for most common case.  */
357  if (pending_chain == 0)
358    return x;
359#endif
360
361  if (code != QUEUED)
362    {
363      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364	 use of autoincrement.  Make a copy of the contents of the memory
365	 location rather than a copy of the address, but not if the value is
366	 of mode BLKmode.  Don't modify X in place since it might be
367	 shared.  */
368      if (code == MEM && GET_MODE (x) != BLKmode
369	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
370	{
371	  rtx y = XEXP (x, 0);
372	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
373
374	  if (QUEUED_INSN (y))
375	    {
376	      rtx temp = gen_reg_rtx (GET_MODE (x));
377
378	      emit_insn_before (gen_move_insn (temp, new),
379				QUEUED_INSN (y));
380	      return temp;
381	    }
382
383	  /* Copy the address into a pseudo, so that the returned value
384	     remains correct across calls to emit_queue.  */
385	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
386	}
387
388      /* Otherwise, recursively protect the subexpressions of all
389	 the kinds of rtx's that can contain a QUEUED.  */
390      if (code == MEM)
391	{
392	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
393	  if (tem != XEXP (x, 0))
394	    {
395	      x = copy_rtx (x);
396	      XEXP (x, 0) = tem;
397	    }
398	}
399      else if (code == PLUS || code == MULT)
400	{
401	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
404	    {
405	      x = copy_rtx (x);
406	      XEXP (x, 0) = new0;
407	      XEXP (x, 1) = new1;
408	    }
409	}
410      return x;
411    }
412  /* If the increment has not happened, use the variable itself.  Copy it
413     into a new pseudo so that the value remains correct across calls to
414     emit_queue.  */
415  if (QUEUED_INSN (x) == 0)
416    return copy_to_reg (QUEUED_VAR (x));
417  /* If the increment has happened and a pre-increment copy exists,
418     use that copy.  */
419  if (QUEUED_COPY (x) != 0)
420    return QUEUED_COPY (x);
421  /* The increment has happened but we haven't set up a pre-increment copy.
422     Set one up now, and use it.  */
423  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
425		    QUEUED_INSN (x));
426  return QUEUED_COPY (x);
427}
428
429/* Return nonzero if X contains a QUEUED expression:
430   if it contains anything that will be altered by a queued increment.
431   We handle only combinations of MEM, PLUS, MINUS and MULT operators
432   since memory addresses generally contain only those.  */
433
434int
435queued_subexp_p (x)
436     rtx x;
437{
438  enum rtx_code code = GET_CODE (x);
439  switch (code)
440    {
441    case QUEUED:
442      return 1;
443    case MEM:
444      return queued_subexp_p (XEXP (x, 0));
445    case MULT:
446    case PLUS:
447    case MINUS:
448      return (queued_subexp_p (XEXP (x, 0))
449	      || queued_subexp_p (XEXP (x, 1)));
450    default:
451      return 0;
452    }
453}
454
455/* Perform all the pending incrementations.  */
456
457void
458emit_queue ()
459{
460  rtx p;
461  while ((p = pending_chain))
462    {
463      rtx body = QUEUED_BODY (p);
464
465      if (GET_CODE (body) == SEQUENCE)
466	{
467	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468	  emit_insn (QUEUED_BODY (p));
469	}
470      else
471	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472      pending_chain = QUEUED_NEXT (p);
473    }
474}
475
476/* Copy data from FROM to TO, where the machine modes are not the same.
477   Both modes may be integer, or both may be floating.
478   UNSIGNEDP should be nonzero if FROM is an unsigned type.
479   This causes zero-extension instead of sign-extension.  */
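/* For instance, if TO is a freshly made SImode pseudo and FROM a QImode
   pseudo, convert_move (to, from, 1) emits whatever insns the target
   needs to zero-extend FROM into TO.  */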
480
481void
482convert_move (to, from, unsignedp)
483     rtx to, from;
484     int unsignedp;
485{
486  enum machine_mode to_mode = GET_MODE (to);
487  enum machine_mode from_mode = GET_MODE (from);
488  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
490  enum insn_code code;
491  rtx libcall;
492
493  /* rtx code for making an equivalent value.  */
494  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
495
496  to = protect_from_queue (to, 1);
497  from = protect_from_queue (from, 0);
498
499  if (to_real != from_real)
500    abort ();
501
502  /* If FROM is a SUBREG that indicates that we have already done at least
503     the required extension, strip it.  We don't handle such SUBREGs as
504     TO here.  */
505
506  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508	  >= GET_MODE_SIZE (to_mode))
509      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510    from = gen_lowpart (to_mode, from), from_mode = to_mode;
511
512  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
513    abort ();
514
515  if (to_mode == from_mode
516      || (from_mode == VOIDmode && CONSTANT_P (from)))
517    {
518      emit_move_insn (to, from);
519      return;
520    }
521
522  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
523    {
524      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
525	abort ();
526
527      if (VECTOR_MODE_P (to_mode))
528	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
529      else
530	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
531
532      emit_move_insn (to, from);
533      return;
534    }
535
536  if (to_real != from_real)
537    abort ();
538
539  if (to_real)
540    {
541      rtx value, insns;
542
543      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
544	{
545	  /* Try converting directly if the insn is supported.  */
546	  if ((code = can_extend_p (to_mode, from_mode, 0))
547	      != CODE_FOR_nothing)
548	    {
549	      emit_unop_insn (code, to, from, UNKNOWN);
550	      return;
551	    }
552	}
553
554#ifdef HAVE_trunchfqf2
555      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
556	{
557	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
558	  return;
559	}
560#endif
561#ifdef HAVE_trunctqfqf2
562      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
563	{
564	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
565	  return;
566	}
567#endif
568#ifdef HAVE_truncsfqf2
569      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
570	{
571	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
572	  return;
573	}
574#endif
575#ifdef HAVE_truncdfqf2
576      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
577	{
578	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
579	  return;
580	}
581#endif
582#ifdef HAVE_truncxfqf2
583      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
584	{
585	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
586	  return;
587	}
588#endif
589#ifdef HAVE_trunctfqf2
590      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
591	{
592	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
593	  return;
594	}
595#endif
596
597#ifdef HAVE_trunctqfhf2
598      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
599	{
600	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
601	  return;
602	}
603#endif
604#ifdef HAVE_truncsfhf2
605      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
606	{
607	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
608	  return;
609	}
610#endif
611#ifdef HAVE_truncdfhf2
612      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
613	{
614	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
615	  return;
616	}
617#endif
618#ifdef HAVE_truncxfhf2
619      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
620	{
621	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
622	  return;
623	}
624#endif
625#ifdef HAVE_trunctfhf2
626      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
627	{
628	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
629	  return;
630	}
631#endif
632
633#ifdef HAVE_truncsftqf2
634      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
635	{
636	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
637	  return;
638	}
639#endif
640#ifdef HAVE_truncdftqf2
641      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
642	{
643	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
644	  return;
645	}
646#endif
647#ifdef HAVE_truncxftqf2
648      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
649	{
650	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
651	  return;
652	}
653#endif
654#ifdef HAVE_trunctftqf2
655      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
656	{
657	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
658	  return;
659	}
660#endif
661
662#ifdef HAVE_truncdfsf2
663      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664	{
665	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
666	  return;
667	}
668#endif
669#ifdef HAVE_truncxfsf2
670      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671	{
672	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
673	  return;
674	}
675#endif
676#ifdef HAVE_trunctfsf2
677      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678	{
679	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
680	  return;
681	}
682#endif
683#ifdef HAVE_truncxfdf2
684      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685	{
686	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
687	  return;
688	}
689#endif
690#ifdef HAVE_trunctfdf2
691      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692	{
693	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
694	  return;
695	}
696#endif
697
698      libcall = (rtx) 0;
699      switch (from_mode)
700	{
701	case SFmode:
702	  switch (to_mode)
703	    {
704	    case DFmode:
705	      libcall = extendsfdf2_libfunc;
706	      break;
707
708	    case XFmode:
709	      libcall = extendsfxf2_libfunc;
710	      break;
711
712	    case TFmode:
713	      libcall = extendsftf2_libfunc;
714	      break;
715
716	    default:
717	      break;
718	    }
719	  break;
720
721	case DFmode:
722	  switch (to_mode)
723	    {
724	    case SFmode:
725	      libcall = truncdfsf2_libfunc;
726	      break;
727
728	    case XFmode:
729	      libcall = extenddfxf2_libfunc;
730	      break;
731
732	    case TFmode:
733	      libcall = extenddftf2_libfunc;
734	      break;
735
736	    default:
737	      break;
738	    }
739	  break;
740
741	case XFmode:
742	  switch (to_mode)
743	    {
744	    case SFmode:
745	      libcall = truncxfsf2_libfunc;
746	      break;
747
748	    case DFmode:
749	      libcall = truncxfdf2_libfunc;
750	      break;
751
752	    default:
753	      break;
754	    }
755	  break;
756
757	case TFmode:
758	  switch (to_mode)
759	    {
760	    case SFmode:
761	      libcall = trunctfsf2_libfunc;
762	      break;
763
764	    case DFmode:
765	      libcall = trunctfdf2_libfunc;
766	      break;
767
768	    default:
769	      break;
770	    }
771	  break;
772
773	default:
774	  break;
775	}
776
777      if (libcall == (rtx) 0)
778	/* This conversion is not implemented yet.  */
779	abort ();
780
781      start_sequence ();
782      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
783				       1, from, from_mode);
784      insns = get_insns ();
785      end_sequence ();
786      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
787								    from));
788      return;
789    }
790
791  /* Now both modes are integers.  */
792
793  /* Handle expanding beyond a word.  */
794  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
796    {
797      rtx insns;
798      rtx lowpart;
799      rtx fill_value;
800      rtx lowfrom;
801      int i;
802      enum machine_mode lowpart_mode;
803      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
804
805      /* Try converting directly if the insn is supported.  */
806      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
807	  != CODE_FOR_nothing)
808	{
809	  /* If FROM is a SUBREG, put it into a register.  Do this
810	     so that we always generate the same set of insns for
811	     better cse'ing; if an intermediate assignment occurred,
812	     we won't be doing the operation directly on the SUBREG.  */
813	  if (optimize > 0 && GET_CODE (from) == SUBREG)
814	    from = force_reg (from_mode, from);
815	  emit_unop_insn (code, to, from, equiv_code);
816	  return;
817	}
818      /* Next, try converting via full word.  */
819      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821		   != CODE_FOR_nothing))
822	{
823	  if (GET_CODE (to) == REG)
824	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826	  emit_unop_insn (code, to,
827			  gen_lowpart (word_mode, to), equiv_code);
828	  return;
829	}
830
831      /* No special multiword conversion insn; do it by hand.  */
832      start_sequence ();
833
834      /* Since we will turn this into a no conflict block, we must ensure
835	 that the source does not overlap the target.  */
836
837      if (reg_overlap_mentioned_p (to, from))
838	from = force_reg (from_mode, from);
839
840      /* Get a copy of FROM widened to a word, if necessary.  */
841      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842	lowpart_mode = word_mode;
843      else
844	lowpart_mode = from_mode;
845
846      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
847
848      lowpart = gen_lowpart (lowpart_mode, to);
849      emit_move_insn (lowpart, lowfrom);
850
851      /* Compute the value to put in each remaining word.  */
852      if (unsignedp)
853	fill_value = const0_rtx;
854      else
855	{
856#ifdef HAVE_slt
857	  if (HAVE_slt
858	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859	      && STORE_FLAG_VALUE == -1)
860	    {
861	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
862			     lowpart_mode, 0);
863	      fill_value = gen_reg_rtx (word_mode);
864	      emit_insn (gen_slt (fill_value));
865	    }
866	  else
867#endif
868	    {
869	      fill_value
870		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
872				NULL_RTX, 0);
873	      fill_value = convert_to_mode (word_mode, fill_value, 1);
874	    }
875	}
876
877      /* Fill the remaining words.  */
878      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
879	{
880	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881	  rtx subword = operand_subword (to, index, 1, to_mode);
882
883	  if (subword == 0)
884	    abort ();
885
886	  if (fill_value != subword)
887	    emit_move_insn (subword, fill_value);
888	}
889
890      insns = get_insns ();
891      end_sequence ();
892
893      emit_no_conflict_block (insns, to, from, NULL_RTX,
894			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
895      return;
896    }
897
898  /* Truncating multi-word to a word or less.  */
899  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
901    {
902      if (!((GET_CODE (from) == MEM
903	     && ! MEM_VOLATILE_P (from)
904	     && direct_load[(int) to_mode]
905	     && ! mode_dependent_address_p (XEXP (from, 0)))
906	    || GET_CODE (from) == REG
907	    || GET_CODE (from) == SUBREG))
908	from = force_reg (from_mode, from);
909      convert_move (to, gen_lowpart (word_mode, from), 0);
910      return;
911    }
912
913  /* Handle pointer conversion.  */			/* SPEE 900220.  */
914  if (to_mode == PQImode)
915    {
916      if (from_mode != QImode)
917	from = convert_to_mode (QImode, from, unsignedp);
918
919#ifdef HAVE_truncqipqi2
920      if (HAVE_truncqipqi2)
921	{
922	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
923	  return;
924	}
925#endif /* HAVE_truncqipqi2 */
926      abort ();
927    }
928
929  if (from_mode == PQImode)
930    {
931      if (to_mode != QImode)
932	{
933	  from = convert_to_mode (QImode, from, unsignedp);
934	  from_mode = QImode;
935	}
936      else
937	{
938#ifdef HAVE_extendpqiqi2
939	  if (HAVE_extendpqiqi2)
940	    {
941	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
942	      return;
943	    }
944#endif /* HAVE_extendpqiqi2 */
945	  abort ();
946	}
947    }
948
949  if (to_mode == PSImode)
950    {
951      if (from_mode != SImode)
952	from = convert_to_mode (SImode, from, unsignedp);
953
954#ifdef HAVE_truncsipsi2
955      if (HAVE_truncsipsi2)
956	{
957	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
958	  return;
959	}
960#endif /* HAVE_truncsipsi2 */
961      abort ();
962    }
963
964  if (from_mode == PSImode)
965    {
966      if (to_mode != SImode)
967	{
968	  from = convert_to_mode (SImode, from, unsignedp);
969	  from_mode = SImode;
970	}
971      else
972	{
973#ifdef HAVE_extendpsisi2
974	  if (! unsignedp && HAVE_extendpsisi2)
975	    {
976	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
977	      return;
978	    }
979#endif /* HAVE_extendpsisi2 */
980#ifdef HAVE_zero_extendpsisi2
981	  if (unsignedp && HAVE_zero_extendpsisi2)
982	    {
983	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
984	      return;
985	    }
986#endif /* HAVE_zero_extendpsisi2 */
987	  abort ();
988	}
989    }
990
991  if (to_mode == PDImode)
992    {
993      if (from_mode != DImode)
994	from = convert_to_mode (DImode, from, unsignedp);
995
996#ifdef HAVE_truncdipdi2
997      if (HAVE_truncdipdi2)
998	{
999	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000	  return;
1001	}
1002#endif /* HAVE_truncdipdi2 */
1003      abort ();
1004    }
1005
1006  if (from_mode == PDImode)
1007    {
1008      if (to_mode != DImode)
1009	{
1010	  from = convert_to_mode (DImode, from, unsignedp);
1011	  from_mode = DImode;
1012	}
1013      else
1014	{
1015#ifdef HAVE_extendpdidi2
1016	  if (HAVE_extendpdidi2)
1017	    {
1018	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019	      return;
1020	    }
1021#endif /* HAVE_extendpdidi2 */
1022	  abort ();
1023	}
1024    }
1025
1026  /* Now follow all the conversions between integers
1027     no more than a word long.  */
1028
1029  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1030  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032				GET_MODE_BITSIZE (from_mode)))
1033    {
1034      if (!((GET_CODE (from) == MEM
1035	     && ! MEM_VOLATILE_P (from)
1036	     && direct_load[(int) to_mode]
1037	     && ! mode_dependent_address_p (XEXP (from, 0)))
1038	    || GET_CODE (from) == REG
1039	    || GET_CODE (from) == SUBREG))
1040	from = force_reg (from_mode, from);
1041      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043	from = copy_to_reg (from);
1044      emit_move_insn (to, gen_lowpart (to_mode, from));
1045      return;
1046    }
1047
1048  /* Handle extension.  */
1049  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050    {
1051      /* Convert directly if that works.  */
1052      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053	  != CODE_FOR_nothing)
1054	{
1055	  if (flag_force_mem)
1056	    from = force_not_mem (from);
1057
1058	  emit_unop_insn (code, to, from, equiv_code);
1059	  return;
1060	}
1061      else
1062	{
1063	  enum machine_mode intermediate;
1064	  rtx tmp;
1065	  tree shift_amount;
1066
1067	  /* Search for a mode to convert via.  */
1068	  for (intermediate = from_mode; intermediate != VOIDmode;
1069	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1070	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1071		  != CODE_FOR_nothing)
1072		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074					       GET_MODE_BITSIZE (intermediate))))
1075		&& (can_extend_p (intermediate, from_mode, unsignedp)
1076		    != CODE_FOR_nothing))
1077	      {
1078		convert_move (to, convert_to_mode (intermediate, from,
1079						   unsignedp), unsignedp);
1080		return;
1081	      }
1082
1083	  /* No suitable intermediate mode.
1084	     Generate what we need with shifts.  */
1085	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086				      - GET_MODE_BITSIZE (from_mode), 0);
1087	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1089			      to, unsignedp);
1090	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1091			      to, unsignedp);
1092	  if (tmp != to)
1093	    emit_move_insn (to, tmp);
1094	  return;
1095	}
1096    }
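  /* To illustrate the search above: on a hypothetical target providing
     only extendqisi2 and extendsidi2, a QImode-to-DImode extension is
     done in two steps, QImode -> SImode -> DImode, via convert_to_mode
     and a recursive convert_move call.  */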
1097
1098  /* Support special truncate insns for certain modes.  */
1099
1100  if (from_mode == DImode && to_mode == SImode)
1101    {
1102#ifdef HAVE_truncdisi2
1103      if (HAVE_truncdisi2)
1104	{
1105	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1106	  return;
1107	}
1108#endif
1109      convert_move (to, force_reg (from_mode, from), unsignedp);
1110      return;
1111    }
1112
1113  if (from_mode == DImode && to_mode == HImode)
1114    {
1115#ifdef HAVE_truncdihi2
1116      if (HAVE_truncdihi2)
1117	{
1118	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1119	  return;
1120	}
1121#endif
1122      convert_move (to, force_reg (from_mode, from), unsignedp);
1123      return;
1124    }
1125
1126  if (from_mode == DImode && to_mode == QImode)
1127    {
1128#ifdef HAVE_truncdiqi2
1129      if (HAVE_truncdiqi2)
1130	{
1131	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1132	  return;
1133	}
1134#endif
1135      convert_move (to, force_reg (from_mode, from), unsignedp);
1136      return;
1137    }
1138
1139  if (from_mode == SImode && to_mode == HImode)
1140    {
1141#ifdef HAVE_truncsihi2
1142      if (HAVE_truncsihi2)
1143	{
1144	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1145	  return;
1146	}
1147#endif
1148      convert_move (to, force_reg (from_mode, from), unsignedp);
1149      return;
1150    }
1151
1152  if (from_mode == SImode && to_mode == QImode)
1153    {
1154#ifdef HAVE_truncsiqi2
1155      if (HAVE_truncsiqi2)
1156	{
1157	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1158	  return;
1159	}
1160#endif
1161      convert_move (to, force_reg (from_mode, from), unsignedp);
1162      return;
1163    }
1164
1165  if (from_mode == HImode && to_mode == QImode)
1166    {
1167#ifdef HAVE_trunchiqi2
1168      if (HAVE_trunchiqi2)
1169	{
1170	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1171	  return;
1172	}
1173#endif
1174      convert_move (to, force_reg (from_mode, from), unsignedp);
1175      return;
1176    }
1177
1178  if (from_mode == TImode && to_mode == DImode)
1179    {
1180#ifdef HAVE_trunctidi2
1181      if (HAVE_trunctidi2)
1182	{
1183	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1184	  return;
1185	}
1186#endif
1187      convert_move (to, force_reg (from_mode, from), unsignedp);
1188      return;
1189    }
1190
1191  if (from_mode == TImode && to_mode == SImode)
1192    {
1193#ifdef HAVE_trunctisi2
1194      if (HAVE_trunctisi2)
1195	{
1196	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1197	  return;
1198	}
1199#endif
1200      convert_move (to, force_reg (from_mode, from), unsignedp);
1201      return;
1202    }
1203
1204  if (from_mode == TImode && to_mode == HImode)
1205    {
1206#ifdef HAVE_trunctihi2
1207      if (HAVE_trunctihi2)
1208	{
1209	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1210	  return;
1211	}
1212#endif
1213      convert_move (to, force_reg (from_mode, from), unsignedp);
1214      return;
1215    }
1216
1217  if (from_mode == TImode && to_mode == QImode)
1218    {
1219#ifdef HAVE_trunctiqi2
1220      if (HAVE_trunctiqi2)
1221	{
1222	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1223	  return;
1224	}
1225#endif
1226      convert_move (to, force_reg (from_mode, from), unsignedp);
1227      return;
1228    }
1229
1230  /* Handle truncation of volatile memrefs, and so on;
1231     the things that couldn't be truncated directly,
1232     and for which there was no special instruction.  */
1233  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1234    {
1235      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236      emit_move_insn (to, temp);
1237      return;
1238    }
1239
1240  /* Mode combination is not recognized.  */
1241  abort ();
1242}
1243
1244/* Return an rtx for a value that would result
1245   from converting X to mode MODE.
1246   Both X and MODE may be floating, or both integer.
1247   UNSIGNEDP is nonzero if X is an unsigned value.
1248   This can be done by referring to a part of X in place
1249   or by copying to a new temporary with conversion.
1250
1251   This function *must not* call protect_from_queue
1252   except when putting X into an insn (in which case convert_move does it).  */
1253
1254rtx
1255convert_to_mode (mode, x, unsignedp)
1256     enum machine_mode mode;
1257     rtx x;
1258     int unsignedp;
1259{
1260  return convert_modes (mode, VOIDmode, x, unsignedp);
1261}
1262
1263/* Return an rtx for a value that would result
1264   from converting X from mode OLDMODE to mode MODE.
1265   Both modes may be floating, or both integer.
1266   UNSIGNEDP is nonzero if X is an unsigned value.
1267
1268   This can be done by referring to a part of X in place
1269   or by copying to a new temporary with conversion.
1270
1271   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1272
1273   This function *must not* call protect_from_queue
1274   except when putting X into an insn (in which case convert_move does it).  */
1275
1276rtx
1277convert_modes (mode, oldmode, x, unsignedp)
1278     enum machine_mode mode, oldmode;
1279     rtx x;
1280     int unsignedp;
1281{
1282  rtx temp;
1283
1284  /* If FROM is a SUBREG that indicates that we have already done at least
1285     the required extension, strip it.  */
1286
1287  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290    x = gen_lowpart (mode, x);
1291
1292  if (GET_MODE (x) != VOIDmode)
1293    oldmode = GET_MODE (x);
1294
1295  if (mode == oldmode)
1296    return x;
1297
1298  /* There is one case that we must handle specially: If we are converting
1299     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300     we are to interpret the constant as unsigned, gen_lowpart will do
1301     the wrong thing if the constant appears negative.  What we want to do is
1302     make the high-order word of the constant zero, not all ones.  */
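  /* For example, with a 32-bit HOST_WIDE_INT, converting (const_int -1)
     to an unsigned DImode constant must produce 0x00000000ffffffff; the
     immed_double_const call below builds that value from VAL and a zero
     high word, whereas gen_lowpart would yield the all-ones constant.  */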
1303
1304  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1307    {
1308      HOST_WIDE_INT val = INTVAL (x);
1309
1310      if (oldmode != VOIDmode
1311	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1312	{
1313	  int width = GET_MODE_BITSIZE (oldmode);
1314
1315	  /* We need to zero extend VAL.  */
1316	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1317	}
1318
1319      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1320    }
1321
1322  /* We can do this with a gen_lowpart if both desired and current modes
1323     are integer, and this is either a constant integer, a register, or a
1324     non-volatile MEM.  Except for the constant case where MODE is no
1325     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1326
1327  if ((GET_CODE (x) == CONST_INT
1328       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329      || (GET_MODE_CLASS (mode) == MODE_INT
1330	  && GET_MODE_CLASS (oldmode) == MODE_INT
1331	  && (GET_CODE (x) == CONST_DOUBLE
1332	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334		       && direct_load[(int) mode])
1335		      || (GET_CODE (x) == REG
1336			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1338    {
1339      /* ?? If we don't know OLDMODE, we have to assume here that
1340	 X does not need sign- or zero-extension.   This may not be
1341	 the case, but it's the best we can do.  */
1342      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1344	{
1345	  HOST_WIDE_INT val = INTVAL (x);
1346	  int width = GET_MODE_BITSIZE (oldmode);
1347
1348	  /* We must sign or zero-extend in this case.  Start by
1349	     zero-extending, then sign extend if we need to.  */
1350	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1351	  if (! unsignedp
1352	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353	    val |= (HOST_WIDE_INT) (-1) << width;
1354
1355	  return GEN_INT (trunc_int_for_mode (val, mode));
1356	}
1357
1358      return gen_lowpart (mode, x);
1359    }
1360
1361  temp = gen_reg_rtx (mode);
1362  convert_move (temp, x, unsignedp);
1363  return temp;
1364}
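/* As a simple example, convert_modes (SImode, QImode, GEN_INT (255), 1)
   returns (const_int 255) directly, reinterpreting the QImode constant
   as an unsigned SImode value without emitting any insns; with UNSIGNEDP
   zero, a constant with the QImode sign bit set would be sign-extended
   instead.  */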
1365
1366/* This macro is used to determine the largest unit size that
1367   move_by_pieces can use.  */
1368
1369/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370   move efficiently, as opposed to MOVE_MAX, which is the maximum
1371   number of bytes we can move with a single instruction.  */
1372
1373#ifndef MOVE_MAX_PIECES
1374#define MOVE_MAX_PIECES   MOVE_MAX
1375#endif
1376
1377/* Generate several move instructions to copy LEN bytes from block FROM to
1378   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1379   and TO through protect_from_queue before calling.
1380
1381   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382   used to push FROM to the stack.
1383
1384   ALIGN is maximum alignment we can assume.  */
1385
1386void
1387move_by_pieces (to, from, len, align)
1388     rtx to, from;
1389     unsigned HOST_WIDE_INT len;
1390     unsigned int align;
1391{
1392  struct move_by_pieces data;
1393  rtx to_addr, from_addr = XEXP (from, 0);
1394  unsigned int max_size = MOVE_MAX_PIECES + 1;
1395  enum machine_mode mode = VOIDmode, tmode;
1396  enum insn_code icode;
1397
1398  data.offset = 0;
1399  data.from_addr = from_addr;
1400  if (to)
1401    {
1402      to_addr = XEXP (to, 0);
1403      data.to = to;
1404      data.autinc_to
1405	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1407      data.reverse
1408	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1409    }
1410  else
1411    {
1412      to_addr = NULL_RTX;
1413      data.to = NULL_RTX;
1414      data.autinc_to = 1;
1415#ifdef STACK_GROWS_DOWNWARD
1416      data.reverse = 1;
1417#else
1418      data.reverse = 0;
1419#endif
1420    }
1421  data.to_addr = to_addr;
1422  data.from = from;
1423  data.autinc_from
1424    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425       || GET_CODE (from_addr) == POST_INC
1426       || GET_CODE (from_addr) == POST_DEC);
1427
1428  data.explicit_inc_from = 0;
1429  data.explicit_inc_to = 0;
1430  if (data.reverse) data.offset = len;
1431  data.len = len;
1432
1433  /* If copying requires more than two move insns,
1434     copy addresses to registers (to make displacements shorter)
1435     and use post-increment if available.  */
1436  if (!(data.autinc_from && data.autinc_to)
1437      && move_by_pieces_ninsns (len, align) > 2)
1438    {
1439      /* Find the mode of the largest move...  */
1440      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442	if (GET_MODE_SIZE (tmode) < max_size)
1443	  mode = tmode;
1444
1445      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1446	{
1447	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448	  data.autinc_from = 1;
1449	  data.explicit_inc_from = -1;
1450	}
1451      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1452	{
1453	  data.from_addr = copy_addr_to_reg (from_addr);
1454	  data.autinc_from = 1;
1455	  data.explicit_inc_from = 1;
1456	}
1457      if (!data.autinc_from && CONSTANT_P (from_addr))
1458	data.from_addr = copy_addr_to_reg (from_addr);
1459      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1460	{
1461	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1462	  data.autinc_to = 1;
1463	  data.explicit_inc_to = -1;
1464	}
1465      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1466	{
1467	  data.to_addr = copy_addr_to_reg (to_addr);
1468	  data.autinc_to = 1;
1469	  data.explicit_inc_to = 1;
1470	}
1471      if (!data.autinc_to && CONSTANT_P (to_addr))
1472	data.to_addr = copy_addr_to_reg (to_addr);
1473    }
1474
1475  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477    align = MOVE_MAX * BITS_PER_UNIT;
1478
1479  /* First move what we can in the largest integer mode, then go to
1480     successively smaller modes.  */
1481
1482  while (max_size > 1)
1483    {
1484      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486	if (GET_MODE_SIZE (tmode) < max_size)
1487	  mode = tmode;
1488
1489      if (mode == VOIDmode)
1490	break;
1491
1492      icode = mov_optab->handlers[(int) mode].insn_code;
1493      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1495
1496      max_size = GET_MODE_SIZE (mode);
1497    }
1498
1499  /* The code above should have handled everything.  */
1500  if (data.len > 0)
1501    abort ();
1502}
1503
1504/* Return number of insns required to move L bytes by pieces.
1505   ALIGN (in bits) is maximum alignment we can assume.  */
1506
1507static unsigned HOST_WIDE_INT
1508move_by_pieces_ninsns (l, align)
1509     unsigned HOST_WIDE_INT l;
1510     unsigned int align;
1511{
1512  unsigned HOST_WIDE_INT n_insns = 0;
1513  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1514
1515  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517    align = MOVE_MAX * BITS_PER_UNIT;
1518
1519  while (max_size > 1)
1520    {
1521      enum machine_mode mode = VOIDmode, tmode;
1522      enum insn_code icode;
1523
1524      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526	if (GET_MODE_SIZE (tmode) < max_size)
1527	  mode = tmode;
1528
1529      if (mode == VOIDmode)
1530	break;
1531
1532      icode = mov_optab->handlers[(int) mode].insn_code;
1533      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1535
1536      max_size = GET_MODE_SIZE (mode);
1537    }
1538
1539  if (l)
1540    abort ();
1541  return n_insns;
1542}
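/* For instance, on a strict-alignment 32-bit target (MOVE_MAX == 4), a
   10-byte block with 32-bit alignment counts as two SImode moves plus
   one HImode move, i.e. three insns.  */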
1543
1544/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1545   with move instructions for mode MODE.  GENFUN is the gen_... function
1546   to make a move insn for that mode.  DATA has all the other info.  */
1547
1548static void
1549move_by_pieces_1 (genfun, mode, data)
1550     rtx (*genfun) PARAMS ((rtx, ...));
1551     enum machine_mode mode;
1552     struct move_by_pieces *data;
1553{
1554  unsigned int size = GET_MODE_SIZE (mode);
1555  rtx to1 = NULL_RTX, from1;
1556
1557  while (data->len >= size)
1558    {
1559      if (data->reverse)
1560	data->offset -= size;
1561
1562      if (data->to)
1563	{
1564	  if (data->autinc_to)
1565	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1566					     data->offset);
1567	  else
1568	    to1 = adjust_address (data->to, mode, data->offset);
1569	}
1570
1571      if (data->autinc_from)
1572	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1573					   data->offset);
1574      else
1575	from1 = adjust_address (data->from, mode, data->offset);
1576
1577      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578	emit_insn (gen_add2_insn (data->to_addr,
1579				  GEN_INT (-(HOST_WIDE_INT)size)));
1580      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1581	emit_insn (gen_add2_insn (data->from_addr,
1582				  GEN_INT (-(HOST_WIDE_INT)size)));
1583
1584      if (data->to)
1585	emit_insn ((*genfun) (to1, from1));
1586      else
1587	{
1588#ifdef PUSH_ROUNDING
1589	  emit_single_push_insn (mode, from1, NULL);
1590#else
1591	  abort ();
1592#endif
1593	}
1594
1595      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1596	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1597      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1598	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1599
1600      if (! data->reverse)
1601	data->offset += size;
1602
1603      data->len -= size;
1604    }
1605}
1606
1607/* Emit code to move a block Y to a block X.
1608   This may be done with string-move instructions,
1609   with multiple scalar move instructions, or with a library call.
1610
1611   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1612   with mode BLKmode.
1613   SIZE is an rtx that says how long they are.
1614   ALIGN is the maximum alignment we can assume they have.
1615
1616   Return the address of the new block, if memcpy is called and returns it,
1617   0 otherwise.  */
1618
1619rtx
1620emit_block_move (x, y, size)
1621     rtx x, y;
1622     rtx size;
1623{
1624  rtx retval = 0;
1625#ifdef TARGET_MEM_FUNCTIONS
1626  static tree fn;
1627  tree call_expr, arg_list;
1628#endif
1629  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1630
1631  if (GET_MODE (x) != BLKmode)
1632    abort ();
1633
1634  if (GET_MODE (y) != BLKmode)
1635    abort ();
1636
1637  x = protect_from_queue (x, 1);
1638  y = protect_from_queue (y, 0);
1639  size = protect_from_queue (size, 0);
1640
1641  if (GET_CODE (x) != MEM)
1642    abort ();
1643  if (GET_CODE (y) != MEM)
1644    abort ();
1645  if (size == 0)
1646    abort ();
1647
1648  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1649    move_by_pieces (x, y, INTVAL (size), align);
1650  else
1651    {
1652      /* Try the most limited insn first, because there's no point
1653	 including more than one in the machine description unless
1654	 the more limited one has some advantage.  */
1655
1656      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1657      enum machine_mode mode;
1658
1659      /* Since this is a move insn, we don't care about volatility.  */
1660      volatile_ok = 1;
1661
1662      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1663	   mode = GET_MODE_WIDER_MODE (mode))
1664	{
1665	  enum insn_code code = movstr_optab[(int) mode];
1666	  insn_operand_predicate_fn pred;
1667
1668	  if (code != CODE_FOR_nothing
1669	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1670		 here because if SIZE is less than the mode mask, as it is
1671		 returned by the macro, it will definitely be less than the
1672		 actual mode mask.  */
1673	      && ((GET_CODE (size) == CONST_INT
1674		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1675		       <= (GET_MODE_MASK (mode) >> 1)))
1676		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1677	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1678		  || (*pred) (x, BLKmode))
1679	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1680		  || (*pred) (y, BLKmode))
1681	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1682		  || (*pred) (opalign, VOIDmode)))
1683	    {
1684	      rtx op2;
1685	      rtx last = get_last_insn ();
1686	      rtx pat;
1687
1688	      op2 = convert_to_mode (mode, size, 1);
1689	      pred = insn_data[(int) code].operand[2].predicate;
1690	      if (pred != 0 && ! (*pred) (op2, mode))
1691		op2 = copy_to_mode_reg (mode, op2);
1692
1693	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1694	      if (pat)
1695		{
1696		  emit_insn (pat);
1697		  volatile_ok = 0;
1698		  return 0;
1699		}
1700	      else
1701		delete_insns_since (last);
1702	    }
1703	}
1704
1705      volatile_ok = 0;
1706
1707      /* X, Y, or SIZE may have been passed through protect_from_queue.
1708
1709	 It is unsafe to save the value generated by protect_from_queue
1710	 and reuse it later.  Consider what happens if emit_queue is
1711	 called before the return value from protect_from_queue is used.
1712
1713	 Expansion of the CALL_EXPR below will call emit_queue before
1714	 we are finished emitting RTL for argument setup.  So if we are
1715	 not careful we could get the wrong value for an argument.
1716
1717	 To avoid this problem we go ahead and emit code to copy X, Y &
1718	 SIZE into new pseudos.  We can then place those new pseudos
1719	 into an RTL_EXPR and use them later, even after a call to
1720	 emit_queue.
1721
1722	 Note this is not strictly needed for library calls since they
1723	 do not call emit_queue before loading their arguments.  However,
1724	 we may need to have library calls call emit_queue in the future
1725	 since failing to do so could cause problems for targets which
1726	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1727      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1728      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1729
1730#ifdef TARGET_MEM_FUNCTIONS
1731      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1732#else
1733      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1734			      TREE_UNSIGNED (integer_type_node));
1735      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1736#endif
1737
1738#ifdef TARGET_MEM_FUNCTIONS
1739      /* It is incorrect to use the libcall calling conventions to call
1740	 memcpy in this context.
1741
1742	 This could be a user call to memcpy and the user may wish to
1743	 examine the return value from memcpy.
1744
1745	 For targets where libcalls and normal calls have different conventions
1746	 for returning pointers, we could end up generating incorrect code.
1747
1748	 So instead of using a libcall sequence we build up a suitable
1749	 CALL_EXPR and expand the call in the normal fashion.  */
1750      if (fn == NULL_TREE)
1751	{
1752	  tree fntype;
1753
1754	  /* This was copied from except.c; I don't know whether all of this
1755	     is necessary in this context.  */
1756	  fn = get_identifier ("memcpy");
1757	  fntype = build_pointer_type (void_type_node);
1758	  fntype = build_function_type (fntype, NULL_TREE);
1759	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1760	  ggc_add_tree_root (&fn, 1);
1761	  DECL_EXTERNAL (fn) = 1;
1762	  TREE_PUBLIC (fn) = 1;
1763	  DECL_ARTIFICIAL (fn) = 1;
1764	  TREE_NOTHROW (fn) = 1;
1765	  make_decl_rtl (fn, NULL);
1766	  assemble_external (fn);
1767	}
1768
1769      /* We need to make an argument list for the function call.
1770
1771	 memcpy has three arguments, the first two are void * addresses and
1772	 the last is a size_t byte count for the copy.  */
1773      arg_list
1774	= build_tree_list (NULL_TREE,
1775			   make_tree (build_pointer_type (void_type_node), x));
1776      TREE_CHAIN (arg_list)
1777	= build_tree_list (NULL_TREE,
1778			   make_tree (build_pointer_type (void_type_node), y));
1779      TREE_CHAIN (TREE_CHAIN (arg_list))
1780	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1781      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1782
1783      /* Now we have to build up the CALL_EXPR itself.  */
1784      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1785      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1786			 call_expr, arg_list, NULL_TREE);
1787      TREE_SIDE_EFFECTS (call_expr) = 1;
1788
1789      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1790#else
1791      emit_library_call (bcopy_libfunc, LCT_NORMAL,
1792			 VOIDmode, 3, y, Pmode, x, Pmode,
1793			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1794					  TREE_UNSIGNED (integer_type_node)),
1795			 TYPE_MODE (integer_type_node));
1796#endif
1797
1798      /* If we are initializing a readonly value, show the above call
1799	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
1800	 from a loop.  */
1801      if (RTX_UNCHANGING_P (x))
1802	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1803    }
1804
1805  return retval;
1806}
1807
1808/* Copy all or part of a value X into registers starting at REGNO.
1809   The number of registers to be filled is NREGS.  */
1810
1811void
1812move_block_to_reg (regno, x, nregs, mode)
1813     int regno;
1814     rtx x;
1815     int nregs;
1816     enum machine_mode mode;
1817{
1818  int i;
1819#ifdef HAVE_load_multiple
1820  rtx pat;
1821  rtx last;
1822#endif
1823
1824  if (nregs == 0)
1825    return;
1826
1827  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828    x = validize_mem (force_const_mem (mode, x));
1829
1830  /* See if the machine can do this with a load multiple insn.  */
1831#ifdef HAVE_load_multiple
1832  if (HAVE_load_multiple)
1833    {
1834      last = get_last_insn ();
1835      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1836			       GEN_INT (nregs));
1837      if (pat)
1838	{
1839	  emit_insn (pat);
1840	  return;
1841	}
1842      else
1843	delete_insns_since (last);
1844    }
1845#endif
1846
1847  for (i = 0; i < nregs; i++)
1848    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849		    operand_subword_force (x, i, mode));
1850}
1851
1852/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853   The number of registers to be filled is NREGS.  SIZE indicates the number
1854   of bytes in the object X.  */
1855
1856void
1857move_block_from_reg (regno, x, nregs, size)
1858     int regno;
1859     rtx x;
1860     int nregs;
1861     int size;
1862{
1863  int i;
1864#ifdef HAVE_store_multiple
1865  rtx pat;
1866  rtx last;
1867#endif
1868  enum machine_mode mode;
1869
1870  if (nregs == 0)
1871    return;
1872
1873  /* If SIZE is that of a mode no bigger than a word, just use that
1874     mode's store operation.  */
1875  if (size <= UNITS_PER_WORD
1876      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1877      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1878    {
1879      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1880      return;
1881    }
1882
1883  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884     to the left before storing to memory.  Note that the previous test
1885     doesn't handle all cases (e.g. SIZE == 3).  */
1886  if (size < UNITS_PER_WORD
1887      && BYTES_BIG_ENDIAN
1888      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1889    {
1890      rtx tem = operand_subword (x, 0, 1, BLKmode);
1891      rtx shift;
1892
1893      if (tem == 0)
1894	abort ();
1895
1896      shift = expand_shift (LSHIFT_EXPR, word_mode,
1897			    gen_rtx_REG (word_mode, regno),
1898			    build_int_2 ((UNITS_PER_WORD - size)
1899					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1900      emit_move_insn (tem, shift);
1901      return;
1902    }
1903
1904  /* See if the machine can do this with a store multiple insn.  */
1905#ifdef HAVE_store_multiple
1906  if (HAVE_store_multiple)
1907    {
1908      last = get_last_insn ();
1909      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1910				GEN_INT (nregs));
1911      if (pat)
1912	{
1913	  emit_insn (pat);
1914	  return;
1915	}
1916      else
1917	delete_insns_since (last);
1918    }
1919#endif
1920
1921  for (i = 0; i < nregs; i++)
1922    {
1923      rtx tem = operand_subword (x, i, 1, BLKmode);
1924
1925      if (tem == 0)
1926	abort ();
1927
1928      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1929    }
1930}
1931
1932/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1933   registers represented by a PARALLEL.  SSIZE represents the total size of
1934   block SRC in bytes, or -1 if not known.  */
1935/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1936   the balance will be in what would be the low-order memory addresses, i.e.
1937   left justified for big endian, right justified for little endian.  This
1938   happens to be true for the targets currently using this support.  If this
1939   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1940   would be needed.  */
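/* As an illustration (not taken from any particular target), the DST
   PARALLEL for a value returned in two 64-bit registers might look
   roughly like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with the byte offset of the piece
   of SRC it is to receive.  */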
1941
1942void
1943emit_group_load (dst, orig_src, ssize)
1944     rtx dst, orig_src;
1945     int ssize;
1946{
1947  rtx *tmps, src;
1948  int start, i;
1949
1950  if (GET_CODE (dst) != PARALLEL)
1951    abort ();
1952
1953  /* Check for a NULL entry, used to indicate that the parameter goes
1954     both on the stack and in registers.  */
1955  if (XEXP (XVECEXP (dst, 0, 0), 0))
1956    start = 0;
1957  else
1958    start = 1;
1959
1960  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1961
1962  /* Process the pieces.  */
1963  for (i = start; i < XVECLEN (dst, 0); i++)
1964    {
1965      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967      unsigned int bytelen = GET_MODE_SIZE (mode);
1968      int shift = 0;
1969
1970      /* Handle trailing fragments that run over the size of the struct.  */
1971      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1972	{
1973	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974	  bytelen = ssize - bytepos;
1975	  if (bytelen <= 0)
1976	    abort ();
1977	}
1978
1979      /* If we won't be loading directly from memory, protect the real source
1980	 from strange tricks we might play; but make sure that the source can
1981	 be loaded directly into the destination.  */
1982      src = orig_src;
1983      if (GET_CODE (orig_src) != MEM
1984	  && (!CONSTANT_P (orig_src)
1985	      || (GET_MODE (orig_src) != mode
1986		  && GET_MODE (orig_src) != VOIDmode)))
1987	{
1988	  if (GET_MODE (orig_src) == VOIDmode)
1989	    src = gen_reg_rtx (mode);
1990	  else
1991	    src = gen_reg_rtx (GET_MODE (orig_src));
1992
1993	  emit_move_insn (src, orig_src);
1994	}
1995
1996      /* Optimize the access just a bit.  */
1997      if (GET_CODE (src) == MEM
1998	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1999	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2000	  && bytelen == GET_MODE_SIZE (mode))
2001	{
2002	  tmps[i] = gen_reg_rtx (mode);
2003	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2004	}
2005      else if (GET_CODE (src) == CONCAT)
2006	{
2007	  if (bytepos == 0
2008	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2009	    tmps[i] = XEXP (src, 0);
2010	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2011		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2012	    tmps[i] = XEXP (src, 1);
2013	  else if (bytepos == 0)
2014	    {
2015	      rtx mem = assign_stack_temp (GET_MODE (src),
2016					   GET_MODE_SIZE (GET_MODE (src)), 0);
2017	      emit_move_insn (mem, src);
2018	      tmps[i] = adjust_address (mem, mode, 0);
2019	    }
2020	  else
2021	    abort ();
2022	}
2023      else if (CONSTANT_P (src)
2024	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2025	tmps[i] = src;
2026      else
2027	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2028				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2029				     mode, mode, ssize);
2030
2031      if (BYTES_BIG_ENDIAN && shift)
2032	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2033		      tmps[i], 0, OPTAB_WIDEN);
2034    }
2035
2036  emit_queue ();
2037
2038  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2039  for (i = start; i < XVECLEN (dst, 0); i++)
2040    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2041}
2042
2043/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2044   registers represented by a PARALLEL.  SSIZE represents the total size of
2045   block DST, or -1 if not known.  */
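/* The SRC PARALLEL here has the same shape as the DST PARALLEL described
   above for emit_group_load: each element pairs a (probable) hard register
   with the byte offset within DST that the register's contents supply.  */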
2046
2047void
2048emit_group_store (orig_dst, src, ssize)
2049     rtx orig_dst, src;
2050     int ssize;
2051{
2052  rtx *tmps, dst;
2053  int start, i;
2054
2055  if (GET_CODE (src) != PARALLEL)
2056    abort ();
2057
2058  /* Check for a NULL entry, used to indicate that the parameter goes
2059     both on the stack and in registers.  */
2060  if (XEXP (XVECEXP (src, 0, 0), 0))
2061    start = 0;
2062  else
2063    start = 1;
2064
2065  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2066
2067  /* Copy the (probable) hard regs into pseudos.  */
2068  for (i = start; i < XVECLEN (src, 0); i++)
2069    {
2070      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2071      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2072      emit_move_insn (tmps[i], reg);
2073    }
2074  emit_queue ();
2075
2076  /* If we won't be storing directly into memory, protect the real destination
2077     from strange tricks we might play.  */
2078  dst = orig_dst;
2079  if (GET_CODE (dst) == PARALLEL)
2080    {
2081      rtx temp;
2082
2083      /* We can get a PARALLEL dst if there is a conditional expression in
2084	 a return statement.  In that case, the dst and src are the same,
2085	 so no action is necessary.  */
2086      if (rtx_equal_p (dst, src))
2087	return;
2088
2089      /* It is unclear if we can ever reach here, but we may as well handle
2090	 it.  Allocate a temporary, and split this into a store/load to/from
2091	 the temporary.  */
2092
2093      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2094      emit_group_store (temp, src, ssize);
2095      emit_group_load (dst, temp, ssize);
2096      return;
2097    }
2098  else if (GET_CODE (dst) != MEM)
2099    {
2100      dst = gen_reg_rtx (GET_MODE (orig_dst));
2101      /* Make life a bit easier for combine.  */
2102      emit_move_insn (dst, const0_rtx);
2103    }
2104
2105  /* Process the pieces.  */
2106  for (i = start; i < XVECLEN (src, 0); i++)
2107    {
2108      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2109      enum machine_mode mode = GET_MODE (tmps[i]);
2110      unsigned int bytelen = GET_MODE_SIZE (mode);
2111
2112      /* Handle trailing fragments that run over the size of the struct.  */
2113      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2114	{
2115	  if (BYTES_BIG_ENDIAN)
2116	    {
2117	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2118	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2119			    tmps[i], 0, OPTAB_WIDEN);
2120	    }
2121	  bytelen = ssize - bytepos;
2122	}
2123
2124      /* Optimize the access just a bit.  */
2125      if (GET_CODE (dst) == MEM
2126	  && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2127	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2128	  && bytelen == GET_MODE_SIZE (mode))
2129	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2130      else
2131	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2132			 mode, tmps[i], ssize);
2133    }
2134
2135  emit_queue ();
2136
2137  /* Copy from the pseudo into the (probable) hard reg.  */
2138  if (GET_CODE (dst) == REG)
2139    emit_move_insn (orig_dst, dst);
2140}
2141
2142/* Generate code to copy a BLKmode object of TYPE out of a
2143   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2144   is null, a stack temporary is created.  TGTBLK is returned.
2145
2146   The primary purpose of this routine is to handle functions
2147   that return BLKmode structures in registers.  Some machines
2148   (the PA for example) want to return all small structures
2149   in registers regardless of the structure's alignment.  */
2150
2151rtx
2152copy_blkmode_from_reg (tgtblk, srcreg, type)
2153     rtx tgtblk;
2154     rtx srcreg;
2155     tree type;
2156{
2157  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2158  rtx src = NULL, dst = NULL;
2159  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2160  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2161
2162  if (tgtblk == 0)
2163    {
2164      tgtblk = assign_temp (build_qualified_type (type,
2165						  (TYPE_QUALS (type)
2166						   | TYPE_QUAL_CONST)),
2167			    0, 1, 1);
2168      preserve_temp_slots (tgtblk);
2169    }
2170
2171  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2172     into a new pseudo which is a full word.
2173
2174     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2175     the wrong part of the register gets copied so we fake a type conversion
2176     in place.  */
2177  if (GET_MODE (srcreg) != BLKmode
2178      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2179    {
2180      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2181	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2182      else
2183	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2184    }
2185
2186  /* Structures whose size is not a multiple of a word are aligned
2187     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2188     machine, this means we must skip the empty high order bytes when
2189     calculating the bit offset.  */
2190  if (BYTES_BIG_ENDIAN
2191      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2192      && bytes % UNITS_PER_WORD)
2193    big_endian_correction
2194      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
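  /* For example, with 32-bit words a 5-byte structure leaves
     bytes % UNITS_PER_WORD == 1, so the correction is 32 - 8 = 24 bits
     and the first extraction below starts 24 bits into its word.  */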
2195
2196  /* Copy the structure BITSIZE bits at a time.
2197
2198     We could probably emit more efficient code for machines which do not use
2199     strict alignment, but it doesn't seem worth the effort at the current
2200     time.  */
2201  for (bitpos = 0, xbitpos = big_endian_correction;
2202       bitpos < bytes * BITS_PER_UNIT;
2203       bitpos += bitsize, xbitpos += bitsize)
2204    {
2205      /* We need a new source operand each time xbitpos is on a
2206	 word boundary and when xbitpos == big_endian_correction
2207	 (the first time through).  */
2208      if (xbitpos % BITS_PER_WORD == 0
2209	  || xbitpos == big_endian_correction)
2210	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2211				     GET_MODE (srcreg));
2212
2213      /* We need a new destination operand each time bitpos is on
2214	 a word boundary.  */
2215      if (bitpos % BITS_PER_WORD == 0)
2216	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2217
2218      /* Use xbitpos for the source extraction (right justified) and
2219	 bitpos for the destination store (left justified).  */
2220      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2221		       extract_bit_field (src, bitsize,
2222					  xbitpos % BITS_PER_WORD, 1,
2223					  NULL_RTX, word_mode, word_mode,
2224					  BITS_PER_WORD),
2225		       BITS_PER_WORD);
2226    }
2227
2228  return tgtblk;
2229}
2230
2231/* Add a USE expression for REG to the (possibly empty) list pointed
2232   to by CALL_FUSAGE.  REG must denote a hard register.  */
2233
2234void
2235use_reg (call_fusage, reg)
2236     rtx *call_fusage, reg;
2237{
2238  if (GET_CODE (reg) != REG
2239      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2240    abort ();
2241
2242  *call_fusage
2243    = gen_rtx_EXPR_LIST (VOIDmode,
2244			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2245}
2246
2247/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2248   starting at REGNO.  All of these registers must be hard registers.  */
2249
2250void
2251use_regs (call_fusage, regno, nregs)
2252     rtx *call_fusage;
2253     int regno;
2254     int nregs;
2255{
2256  int i;
2257
2258  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2259    abort ();
2260
2261  for (i = 0; i < nregs; i++)
2262    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2263}
2264
2265/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2266   PARALLEL REGS.  This is for calls that pass values in multiple
2267   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2268
2269void
2270use_group_regs (call_fusage, regs)
2271     rtx *call_fusage;
2272     rtx regs;
2273{
2274  int i;
2275
2276  for (i = 0; i < XVECLEN (regs, 0); i++)
2277    {
2278      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2279
2280      /* A NULL entry means the parameter goes both on the stack and in
2281	 registers.  This can also be a MEM for targets that pass values
2282	 partially on the stack and partially in registers.  */
2283      if (reg != 0 && GET_CODE (reg) == REG)
2284	use_reg (call_fusage, reg);
2285    }
2286}
2287
2288
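/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */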
2289int
2290can_store_by_pieces (len, constfun, constfundata, align)
2291     unsigned HOST_WIDE_INT len;
2292     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2293     PTR constfundata;
2294     unsigned int align;
2295{
2296  unsigned HOST_WIDE_INT max_size, l;
2297  HOST_WIDE_INT offset = 0;
2298  enum machine_mode mode, tmode;
2299  enum insn_code icode;
2300  int reverse;
2301  rtx cst;
2302
2303  if (! MOVE_BY_PIECES_P (len, align))
2304    return 0;
2305
2306  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2307      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2308    align = MOVE_MAX * BITS_PER_UNIT;
2309
2310  /* We would first store what we can in the largest integer mode, then go to
2311     successively smaller modes.  */
2312
2313  for (reverse = 0;
2314       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2315       reverse++)
2316    {
2317      l = len;
2318      mode = VOIDmode;
2319      max_size = MOVE_MAX_PIECES + 1;
2320      while (max_size > 1)
2321	{
2322	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324	    if (GET_MODE_SIZE (tmode) < max_size)
2325	      mode = tmode;
2326
2327	  if (mode == VOIDmode)
2328	    break;
2329
2330	  icode = mov_optab->handlers[(int) mode].insn_code;
2331	  if (icode != CODE_FOR_nothing
2332	      && align >= GET_MODE_ALIGNMENT (mode))
2333	    {
2334	      unsigned int size = GET_MODE_SIZE (mode);
2335
2336	      while (l >= size)
2337		{
2338		  if (reverse)
2339		    offset -= size;
2340
2341		  cst = (*constfun) (constfundata, offset, mode);
2342		  if (!LEGITIMATE_CONSTANT_P (cst))
2343		    return 0;
2344
2345		  if (!reverse)
2346		    offset += size;
2347
2348		  l -= size;
2349		}
2350	    }
2351
2352	  max_size = GET_MODE_SIZE (mode);
2353	}
2354
2355      /* The code above should have handled everything.  */
2356      if (l != 0)
2357	abort ();
2358    }
2359
2360  return 1;
2361}
2362
2363/* Generate several move instructions to store LEN bytes generated by
2364   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2365   pointer which will be passed as argument in every CONSTFUN call.
2366   ALIGN is maximum alignment we can assume.  */
2367
2368void
2369store_by_pieces (to, len, constfun, constfundata, align)
2370     rtx to;
2371     unsigned HOST_WIDE_INT len;
2372     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2373     PTR constfundata;
2374     unsigned int align;
2375{
2376  struct store_by_pieces data;
2377
2378  if (! MOVE_BY_PIECES_P (len, align))
2379    abort ();
2380  to = protect_from_queue (to, 1);
2381  data.constfun = constfun;
2382  data.constfundata = constfundata;
2383  data.len = len;
2384  data.to = to;
2385  store_by_pieces_1 (&data, align);
2386}
2387
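/* As a sketch only (hypothetical, for illustration -- real CONSTFUNs live
   with the callers, e.g. the string builtins), a CONSTFUN returning the
   MODE-sized piece of a string constant at byte OFFSET might look like:

     static rtx
     example_constfun (data, offset, mode)
	  PTR data;
	  HOST_WIDE_INT offset;
	  enum machine_mode mode;
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   clear_by_pieces below uses the trivial instance clear_by_pieces_1,
   which returns const0_rtx for every piece.  */
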
2388/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2389   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2390   before calling. ALIGN is maximum alignment we can assume.  */
2391
2392static void
2393clear_by_pieces (to, len, align)
2394     rtx to;
2395     unsigned HOST_WIDE_INT len;
2396     unsigned int align;
2397{
2398  struct store_by_pieces data;
2399
2400  data.constfun = clear_by_pieces_1;
2401  data.constfundata = NULL;
2402  data.len = len;
2403  data.to = to;
2404  store_by_pieces_1 (&data, align);
2405}
2406
2407/* Callback routine for clear_by_pieces.
2408   Return const0_rtx unconditionally.  */
2409
2410static rtx
2411clear_by_pieces_1 (data, offset, mode)
2412     PTR data ATTRIBUTE_UNUSED;
2413     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2414     enum machine_mode mode ATTRIBUTE_UNUSED;
2415{
2416  return const0_rtx;
2417}
2418
2419/* Subroutine of clear_by_pieces and store_by_pieces.
2420   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2421   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2422   before calling.  ALIGN is maximum alignment we can assume.  */
2423
2424static void
2425store_by_pieces_1 (data, align)
2426     struct store_by_pieces *data;
2427     unsigned int align;
2428{
2429  rtx to_addr = XEXP (data->to, 0);
2430  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2431  enum machine_mode mode = VOIDmode, tmode;
2432  enum insn_code icode;
2433
2434  data->offset = 0;
2435  data->to_addr = to_addr;
2436  data->autinc_to
2437    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2438       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2439
2440  data->explicit_inc_to = 0;
2441  data->reverse
2442    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2443  if (data->reverse)
2444    data->offset = data->len;
2445
2446  /* If storing requires more than two move insns,
2447     copy addresses to registers (to make displacements shorter)
2448     and use post-increment if available.  */
2449  if (!data->autinc_to
2450      && move_by_pieces_ninsns (data->len, align) > 2)
2451    {
2452      /* Determine the main mode we'll be using.  */
2453      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455	if (GET_MODE_SIZE (tmode) < max_size)
2456	  mode = tmode;
2457
2458      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2459	{
2460	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2461	  data->autinc_to = 1;
2462	  data->explicit_inc_to = -1;
2463	}
2464
2465      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2466	  && ! data->autinc_to)
2467	{
2468	  data->to_addr = copy_addr_to_reg (to_addr);
2469	  data->autinc_to = 1;
2470	  data->explicit_inc_to = 1;
2471	}
2472
2473      if ( !data->autinc_to && CONSTANT_P (to_addr))
2474	data->to_addr = copy_addr_to_reg (to_addr);
2475    }
2476
2477  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2478      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2479    align = MOVE_MAX * BITS_PER_UNIT;
2480
2481  /* First store what we can in the largest integer mode, then go to
2482     successively smaller modes.  */
2483
2484  while (max_size > 1)
2485    {
2486      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2487	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2488	if (GET_MODE_SIZE (tmode) < max_size)
2489	  mode = tmode;
2490
2491      if (mode == VOIDmode)
2492	break;
2493
2494      icode = mov_optab->handlers[(int) mode].insn_code;
2495      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2496	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2497
2498      max_size = GET_MODE_SIZE (mode);
2499    }
2500
2501  /* The code above should have handled everything.  */
2502  if (data->len != 0)
2503    abort ();
2504}
2505
2506/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2507   with move instructions for mode MODE.  GENFUN is the gen_... function
2508   to make a move insn for that mode.  DATA has all the other info.  */
2509
2510static void
2511store_by_pieces_2 (genfun, mode, data)
2512     rtx (*genfun) PARAMS ((rtx, ...));
2513     enum machine_mode mode;
2514     struct store_by_pieces *data;
2515{
2516  unsigned int size = GET_MODE_SIZE (mode);
2517  rtx to1, cst;
2518
2519  while (data->len >= size)
2520    {
2521      if (data->reverse)
2522	data->offset -= size;
2523
2524      if (data->autinc_to)
2525	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2526					 data->offset);
2527      else
2528	to1 = adjust_address (data->to, mode, data->offset);
2529
2530      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2531	emit_insn (gen_add2_insn (data->to_addr,
2532				  GEN_INT (-(HOST_WIDE_INT) size)));
2533
2534      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2535      emit_insn ((*genfun) (to1, cst));
2536
2537      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2538	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2539
2540      if (! data->reverse)
2541	data->offset += size;
2542
2543      data->len -= size;
2544    }
2545}
2546
2547/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2548   its length in bytes.  */
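/* Returns the rtx for the value of the emitted memset call when the
   TARGET_MEM_FUNCTIONS path below expands one, and 0 when the store is
   done inline or via a clrstr pattern.  */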
2549
2550rtx
2551clear_storage (object, size)
2552     rtx object;
2553     rtx size;
2554{
2555#ifdef TARGET_MEM_FUNCTIONS
2556  static tree fn;
2557  tree call_expr, arg_list;
2558#endif
2559  rtx retval = 0;
2560  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2562
2563  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564     just move a zero.  Otherwise, do this a piece at a time.  */
2565  if (GET_MODE (object) != BLKmode
2566      && GET_CODE (size) == CONST_INT
2567      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2568    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2569  else
2570    {
2571      object = protect_from_queue (object, 1);
2572      size = protect_from_queue (size, 0);
2573
2574      if (GET_CODE (size) == CONST_INT
2575	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2576	clear_by_pieces (object, INTVAL (size), align);
2577      else
2578	{
2579	  /* Try the most limited insn first, because there's no point
2580	     including more than one in the machine description unless
2581	     the more limited one has some advantage.  */
2582
2583	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2584	  enum machine_mode mode;
2585
2586	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2587	       mode = GET_MODE_WIDER_MODE (mode))
2588	    {
2589	      enum insn_code code = clrstr_optab[(int) mode];
2590	      insn_operand_predicate_fn pred;
2591
2592	      if (code != CODE_FOR_nothing
2593		  /* We don't need MODE to be narrower than
2594		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2595		     the mode mask, as it is returned by the macro, it will
2596		     definitely be less than the actual mode mask.  */
2597		  && ((GET_CODE (size) == CONST_INT
2598		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2599			   <= (GET_MODE_MASK (mode) >> 1)))
2600		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2601		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2602		      || (*pred) (object, BLKmode))
2603		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2604		      || (*pred) (opalign, VOIDmode)))
2605		{
2606		  rtx op1;
2607		  rtx last = get_last_insn ();
2608		  rtx pat;
2609
2610		  op1 = convert_to_mode (mode, size, 1);
2611		  pred = insn_data[(int) code].operand[1].predicate;
2612		  if (pred != 0 && ! (*pred) (op1, mode))
2613		    op1 = copy_to_mode_reg (mode, op1);
2614
2615		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2616		  if (pat)
2617		    {
2618		      emit_insn (pat);
2619		      return 0;
2620		    }
2621		  else
2622		    delete_insns_since (last);
2623		}
2624	    }
2625
2626	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2627
2628	     It is unsafe to save the value generated by protect_from_queue
2629	     and reuse it later.  Consider what happens if emit_queue is
2630	     called before the return value from protect_from_queue is used.
2631
2632	     Expansion of the CALL_EXPR below will call emit_queue before
2633	     we are finished emitting RTL for argument setup.  So if we are
2634	     not careful we could get the wrong value for an argument.
2635
2636	     To avoid this problem we go ahead and emit code to copy OBJECT
2637	     and SIZE into new pseudos.  We can then place those new pseudos
2638	     into an RTL_EXPR and use them later, even after a call to
2639	     emit_queue.
2640
2641	     Note this is not strictly needed for library calls since they
2642	     do not call emit_queue before loading their arguments.  However,
2643	     we may need to have library calls call emit_queue in the future
2644	     since failing to do so could cause problems for targets which
2645	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2646	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2647
2648#ifdef TARGET_MEM_FUNCTIONS
2649	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2650#else
2651	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2652				  TREE_UNSIGNED (integer_type_node));
2653	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2654#endif
2655
2656#ifdef TARGET_MEM_FUNCTIONS
2657	  /* It is incorrect to use the libcall calling conventions to call
2658	     memset in this context.
2659
2660	     This could be a user call to memset and the user may wish to
2661	     examine the return value from memset.
2662
2663	     For targets where libcalls and normal calls have different
2664	     conventions for returning pointers, we could end up generating
2665	     incorrect code.
2666
2667	     So instead of using a libcall sequence we build up a suitable
2668	     CALL_EXPR and expand the call in the normal fashion.  */
2669	  if (fn == NULL_TREE)
2670	    {
2671	      tree fntype;
2672
2673	      /* This was copied from except.c; I don't know whether all of it is
2674		 necessary in this context or not.  */
2675	      fn = get_identifier ("memset");
2676	      fntype = build_pointer_type (void_type_node);
2677	      fntype = build_function_type (fntype, NULL_TREE);
2678	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2679	      ggc_add_tree_root (&fn, 1);
2680	      DECL_EXTERNAL (fn) = 1;
2681	      TREE_PUBLIC (fn) = 1;
2682	      DECL_ARTIFICIAL (fn) = 1;
2683	      TREE_NOTHROW (fn) = 1;
2684	      make_decl_rtl (fn, NULL);
2685	      assemble_external (fn);
2686	    }
2687
2688	  /* We need to make an argument list for the function call.
2689
2690	     memset takes three arguments: the first is a void * address, the
2691	     second an integer with the initialization value, and the last is a
2692	     size_t count of the number of bytes to set.  */
2693	  arg_list
2694	    = build_tree_list (NULL_TREE,
2695			       make_tree (build_pointer_type (void_type_node),
2696					  object));
2697	  TREE_CHAIN (arg_list)
2698	    = build_tree_list (NULL_TREE,
2699			       make_tree (integer_type_node, const0_rtx));
2700	  TREE_CHAIN (TREE_CHAIN (arg_list))
2701	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2702	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2703
2704	  /* Now we have to build up the CALL_EXPR itself.  */
2705	  call_expr = build1 (ADDR_EXPR,
2706			      build_pointer_type (TREE_TYPE (fn)), fn);
2707	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708			     call_expr, arg_list, NULL_TREE);
2709	  TREE_SIDE_EFFECTS (call_expr) = 1;
2710
2711	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2712#else
2713	  emit_library_call (bzero_libfunc, LCT_NORMAL,
2714			     VOIDmode, 2, object, Pmode, size,
2715			     TYPE_MODE (integer_type_node));
2716#endif
2717
2718	  /* If we are initializing a readonly value, show the above call
2719	     clobbered it.  Otherwise, a load from it may erroneously be
2720	     hoisted from a loop.  */
2721	  if (RTX_UNCHANGING_P (object))
2722	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2723	}
2724    }
2725
2726  return retval;
2727}
2728
2729/* Generate code to copy Y into X.
2730   Both Y and X must have the same mode, except that
2731   Y can be a constant with VOIDmode.
2732   This mode cannot be BLKmode; use emit_block_move for that.
2733
2734   Return the last instruction emitted.  */
2735
2736rtx
2737emit_move_insn (x, y)
2738     rtx x, y;
2739{
2740  enum machine_mode mode = GET_MODE (x);
2741  rtx y_cst = NULL_RTX;
2742  rtx last_insn;
2743
2744  x = protect_from_queue (x, 1);
2745  y = protect_from_queue (y, 0);
2746
2747  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2748    abort ();
2749
2750  /* Never force constant_p_rtx to memory.  */
2751  if (GET_CODE (y) == CONSTANT_P_RTX)
2752    ;
2753  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2754    {
2755      y_cst = y;
2756      y = force_const_mem (mode, y);
2757    }
2758
2759  /* If X or Y are memory references, verify that their addresses are valid
2760     for the machine.  */
2761  if (GET_CODE (x) == MEM
2762      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2763	   && ! push_operand (x, GET_MODE (x)))
2764	  || (flag_force_addr
2765	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2766    x = validize_mem (x);
2767
2768  if (GET_CODE (y) == MEM
2769      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2770	  || (flag_force_addr
2771	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2772    y = validize_mem (y);
2773
2774  if (mode == BLKmode)
2775    abort ();
2776
2777  last_insn = emit_move_insn_1 (x, y);
2778
2779  if (y_cst && GET_CODE (x) == REG)
2780    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2781
2782  return last_insn;
2783}
2784
2785/* Low level part of emit_move_insn.
2786   Called just like emit_move_insn, but assumes X and Y
2787   are basically valid.  */
2788
2789rtx
2790emit_move_insn_1 (x, y)
2791     rtx x, y;
2792{
2793  enum machine_mode mode = GET_MODE (x);
2794  enum machine_mode submode;
2795  enum mode_class class = GET_MODE_CLASS (mode);
2796
2797  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2798    abort ();
2799
2800  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2801    return
2802      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2803
2804  /* Expand complex moves by moving real part and imag part, if possible.  */
2805  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2806	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2807						    * BITS_PER_UNIT),
2808						   (class == MODE_COMPLEX_INT
2809						    ? MODE_INT : MODE_FLOAT),
2810						   0))
2811	   && (mov_optab->handlers[(int) submode].insn_code
2812	       != CODE_FOR_nothing))
2813    {
2814      /* Don't split destination if it is a stack push.  */
2815      int stack = push_operand (x, GET_MODE (x));
2816
2817#ifdef PUSH_ROUNDING
2818      /* In case we output to the stack, but the size is smaller than the
2819	 machine can push exactly, we need to use move instructions.  */
2820      if (stack
2821	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2822	      != GET_MODE_SIZE (submode)))
2823	{
2824	  rtx temp;
2825	  HOST_WIDE_INT offset1, offset2;
2826
2827	  /* Do not use anti_adjust_stack, since we don't want to update
2828	     stack_pointer_delta.  */
2829	  temp = expand_binop (Pmode,
2830#ifdef STACK_GROWS_DOWNWARD
2831			       sub_optab,
2832#else
2833			       add_optab,
2834#endif
2835			       stack_pointer_rtx,
2836			       GEN_INT
2837				 (PUSH_ROUNDING
2838				  (GET_MODE_SIZE (GET_MODE (x)))),
2839			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2840
2841	  if (temp != stack_pointer_rtx)
2842	    emit_move_insn (stack_pointer_rtx, temp);
2843
2844#ifdef STACK_GROWS_DOWNWARD
2845	  offset1 = 0;
2846	  offset2 = GET_MODE_SIZE (submode);
2847#else
2848	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2849	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2850		     + GET_MODE_SIZE (submode));
2851#endif
2852
2853	  emit_move_insn (change_address (x, submode,
2854					  gen_rtx_PLUS (Pmode,
2855						        stack_pointer_rtx,
2856							GEN_INT (offset1))),
2857			  gen_realpart (submode, y));
2858	  emit_move_insn (change_address (x, submode,
2859					  gen_rtx_PLUS (Pmode,
2860						        stack_pointer_rtx,
2861							GEN_INT (offset2))),
2862			  gen_imagpart (submode, y));
2863	}
2864      else
2865#endif
2866      /* If this is a stack push, push the highpart first, so it
2867	 will be in the argument order.
2868
2869	 In that case, change_address is used only to convert
2870	 the mode, not to change the address.  */
2871      if (stack)
2872	{
2873	  /* Note that the real part always precedes the imag part in memory
2874	     regardless of the machine's endianness.  */
2875#ifdef STACK_GROWS_DOWNWARD
2876	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2878		      gen_imagpart (submode, y)));
2879	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2881		      gen_realpart (submode, y)));
2882#else
2883	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2884		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2885		      gen_realpart (submode, y)));
2886	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2888		      gen_imagpart (submode, y)));
2889#endif
2890	}
2891      else
2892	{
2893	  rtx realpart_x, realpart_y;
2894	  rtx imagpart_x, imagpart_y;
2895
2896	  /* If this is a complex value with each part being smaller than a
2897	     word, the usual calling sequence will likely pack the pieces into
2898	     a single register.  Unfortunately, SUBREG of hard registers only
2899	     deals in terms of words, so we have a problem converting input
2900	     arguments to the CONCAT of two registers that is used elsewhere
2901	     for complex values.  If this is before reload, we can copy it into
2902	     memory and reload.  FIXME, we should see about using extract and
2903	     insert on integer registers, but complex short and complex char
2904	     variables should be rarely used.  */
2905	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2906	      && (reload_in_progress | reload_completed) == 0)
2907	    {
2908	      int packed_dest_p
2909		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2910	      int packed_src_p
2911		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2912
2913	      if (packed_dest_p || packed_src_p)
2914		{
2915		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2916					       ? MODE_FLOAT : MODE_INT);
2917
2918		  enum machine_mode reg_mode
2919		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2920
2921		  if (reg_mode != BLKmode)
2922		    {
2923		      rtx mem = assign_stack_temp (reg_mode,
2924						   GET_MODE_SIZE (mode), 0);
2925		      rtx cmem = adjust_address (mem, mode, 0);
2926
2927		      cfun->cannot_inline
2928			= N_("function using short complex types cannot be inline");
2929
2930		      if (packed_dest_p)
2931			{
2932			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2933
2934			  emit_move_insn_1 (cmem, y);
2935			  return emit_move_insn_1 (sreg, mem);
2936			}
2937		      else
2938			{
2939			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2940
2941			  emit_move_insn_1 (mem, sreg);
2942			  return emit_move_insn_1 (x, cmem);
2943			}
2944		    }
2945		}
2946	    }
2947
2948	  realpart_x = gen_realpart (submode, x);
2949	  realpart_y = gen_realpart (submode, y);
2950	  imagpart_x = gen_imagpart (submode, x);
2951	  imagpart_y = gen_imagpart (submode, y);
2952
2953	  /* Show the output dies here.  This is necessary for SUBREGs
2954	     of pseudos since we cannot track their lifetimes correctly;
2955	     hard regs shouldn't appear here except as return values.
2956	     We never want to emit such a clobber after reload.  */
2957	  if (x != y
2958	      && ! (reload_in_progress || reload_completed)
2959	      && (GET_CODE (realpart_x) == SUBREG
2960		  || GET_CODE (imagpart_x) == SUBREG))
2961	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2962
2963	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2964		     (realpart_x, realpart_y));
2965	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966		     (imagpart_x, imagpart_y));
2967	}
2968
2969      return get_last_insn ();
2970    }
2971
2972  /* This will handle any multi-word mode that lacks a move_insn pattern.
2973     However, you will get better code if you define such patterns,
2974     even if they must turn into multiple assembler instructions.  */
2975  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2976    {
2977      rtx last_insn = 0;
2978      rtx seq, inner;
2979      int need_clobber;
2980      int i;
2981
2982#ifdef PUSH_ROUNDING
2983
2984      /* If X is a push on the stack, do the push now and replace
2985	 X with a reference to the stack pointer.  */
2986      if (push_operand (x, GET_MODE (x)))
2987	{
2988	  rtx temp;
2989	  enum rtx_code code;
2990
2991	  /* Do not use anti_adjust_stack, since we don't want to update
2992	     stack_pointer_delta.  */
2993	  temp = expand_binop (Pmode,
2994#ifdef STACK_GROWS_DOWNWARD
2995			       sub_optab,
2996#else
2997			       add_optab,
2998#endif
2999			       stack_pointer_rtx,
3000			       GEN_INT
3001				 (PUSH_ROUNDING
3002				  (GET_MODE_SIZE (GET_MODE (x)))),
3003			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3004
3005          if (temp != stack_pointer_rtx)
3006            emit_move_insn (stack_pointer_rtx, temp);
3007
3008	  code = GET_CODE (XEXP (x, 0));
3009
3010	  /* Just hope that small offsets off SP are OK.  */
3011	  if (code == POST_INC)
3012	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3013				GEN_INT (-((HOST_WIDE_INT)
3014					   GET_MODE_SIZE (GET_MODE (x)))));
3015	  else if (code == POST_DEC)
3016	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3017				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3018	  else
3019	    temp = stack_pointer_rtx;
3020
3021	  x = change_address (x, VOIDmode, temp);
3022	}
3023#endif
3024
3025      /* If we are in reload, see if either operand is a MEM whose address
3026	 is scheduled for replacement.  */
3027      if (reload_in_progress && GET_CODE (x) == MEM
3028	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3029	x = replace_equiv_address_nv (x, inner);
3030      if (reload_in_progress && GET_CODE (y) == MEM
3031	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3032	y = replace_equiv_address_nv (y, inner);
3033
3034      start_sequence ();
3035
3036      need_clobber = 0;
3037      for (i = 0;
3038	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3039	   i++)
3040	{
3041	  rtx xpart = operand_subword (x, i, 1, mode);
3042	  rtx ypart = operand_subword (y, i, 1, mode);
3043
3044	  /* If we can't get a part of Y, put Y into memory if it is a
3045	     constant.  Otherwise, force it into a register.  If we still
3046	     can't get a part of Y, abort.  */
3047	  if (ypart == 0 && CONSTANT_P (y))
3048	    {
3049	      y = force_const_mem (mode, y);
3050	      ypart = operand_subword (y, i, 1, mode);
3051	    }
3052	  else if (ypart == 0)
3053	    ypart = operand_subword_force (y, i, mode);
3054
3055	  if (xpart == 0 || ypart == 0)
3056	    abort ();
3057
3058	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3059
3060	  last_insn = emit_move_insn (xpart, ypart);
3061	}
3062
3063      seq = gen_sequence ();
3064      end_sequence ();
3065
3066      /* Show the output dies here.  This is necessary for SUBREGs
3067	 of pseudos since we cannot track their lifetimes correctly;
3068	 hard regs shouldn't appear here except as return values.
3069	 We never want to emit such a clobber after reload.  */
3070      if (x != y
3071	  && ! (reload_in_progress || reload_completed)
3072	  && need_clobber != 0)
3073	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3074
3075      emit_insn (seq);
3076
3077      return last_insn;
3078    }
3079  else
3080    abort ();
3081}
3082
3083/* Pushing data onto the stack.  */
3084
3085/* Push a block of length SIZE (perhaps variable)
3086   and return an rtx to address the beginning of the block.
3087   Note that it is not possible for the value returned to be a QUEUED.
3088   The value may be virtual_outgoing_args_rtx.
3089
3090   EXTRA is the number of bytes of padding to push in addition to SIZE.
3091   BELOW nonzero means this padding comes at low addresses;
3092   otherwise, the padding comes at high addresses.  */
3093
3094rtx
3095push_block (size, extra, below)
3096     rtx size;
3097     int extra, below;
3098{
3099  rtx temp;
3100
3101  size = convert_modes (Pmode, ptr_mode, size, 1);
3102  if (CONSTANT_P (size))
3103    anti_adjust_stack (plus_constant (size, extra));
3104  else if (GET_CODE (size) == REG && extra == 0)
3105    anti_adjust_stack (size);
3106  else
3107    {
3108      temp = copy_to_mode_reg (Pmode, size);
3109      if (extra != 0)
3110	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3111			     temp, 0, OPTAB_LIB_WIDEN);
3112      anti_adjust_stack (temp);
3113    }
3114
3115#ifndef STACK_GROWS_DOWNWARD
3116  if (0)
3117#else
3118  if (1)
3119#endif
3120    {
3121      temp = virtual_outgoing_args_rtx;
3122      if (extra != 0 && below)
3123	temp = plus_constant (temp, extra);
3124    }
3125  else
3126    {
3127      if (GET_CODE (size) == CONST_INT)
3128	temp = plus_constant (virtual_outgoing_args_rtx,
3129			      -INTVAL (size) - (below ? 0 : extra));
3130      else if (extra != 0 && !below)
3131	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3132			     negate_rtx (Pmode, plus_constant (size, extra)));
3133      else
3134	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3135			     negate_rtx (Pmode, size));
3136    }
3137
3138  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3139}
3140
3141#ifdef PUSH_ROUNDING
3142
3143/* Emit a single push insn for X, which has mode MODE.  If TYPE is nonzero,
   it is the type of the data being pushed and is used to set the attributes
   of the destination memory.  */
3144
3145static void
3146emit_single_push_insn (mode, x, type)
3147     rtx x;
3148     enum machine_mode mode;
3149     tree type;
3150{
3151  rtx dest_addr;
3152  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3153  rtx dest;
3154  enum insn_code icode;
3155  insn_operand_predicate_fn pred;
3156
3157  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3158  /* If there is a push pattern, use it.  Otherwise try the old way of
3159     handing a MEM that represents the push operation to the move expander.  */
3160  icode = push_optab->handlers[(int) mode].insn_code;
3161  if (icode != CODE_FOR_nothing)
3162    {
3163      if (((pred = insn_data[(int) icode].operand[0].predicate)
3164	   && !((*pred) (x, mode))))
3165	x = force_reg (mode, x);
3166      emit_insn (GEN_FCN (icode) (x));
3167      return;
3168    }
3169  if (GET_MODE_SIZE (mode) == rounded_size)
3170    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3171  else
3172    {
3173#ifdef STACK_GROWS_DOWNWARD
3174      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3175				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3176#else
3177      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3178				GEN_INT (rounded_size));
3179#endif
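      /* Wrap the adjustment in a PRE_MODIFY so that the resulting MEM both
	 updates the stack pointer by the rounded size and addresses the
	 newly allocated slot in a single push-style access.  */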
3180      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3181    }
3182
3183  dest = gen_rtx_MEM (mode, dest_addr);
3184
3185  if (type != 0)
3186    {
3187      set_mem_attributes (dest, type, 1);
3188
3189      if (flag_optimize_sibling_calls)
3190	/* Function incoming arguments may overlap with sibling call
3191	   outgoing arguments and we cannot allow reordering of reads
3192	   from function arguments with stores to outgoing arguments
3193	   of sibling calls.  */
3194	set_mem_alias_set (dest, 0);
3195    }
3196  emit_move_insn (dest, x);
3197}
3198#endif
3199
3200/* Generate code to push X onto the stack, assuming it has mode MODE and
3201   type TYPE.
3202   MODE is redundant except when X is a CONST_INT (since they don't
3203   carry mode info).
3204   SIZE is an rtx for the size of data to be copied (in bytes),
3205   needed only if X is BLKmode.
3206
3207   ALIGN (in bits) is maximum alignment we can assume.
3208
3209   If PARTIAL and REG are both nonzero, then copy that many of the first
3210   words of X into registers starting with REG, and push the rest of X.
3211   The amount of space pushed is decreased by PARTIAL words,
3212   rounded *down* to a multiple of PARM_BOUNDARY.
3213   REG must be a hard register in this case.
3214	   If REG is zero but PARTIAL is not, take all other actions for an
3215   argument partially in registers, but do not actually load any
3216   registers.
3217
3218   EXTRA is the amount in bytes of extra space to leave next to this arg.
3219   This is ignored if an argument block has already been allocated.
3220
3221   On a machine that lacks real push insns, ARGS_ADDR is the address of
3222   the bottom of the argument block for this call.  We use indexing off there
3223	   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3224   argument block has not been preallocated.
3225
3226   ARGS_SO_FAR is the size of args previously pushed for this call.
3227
3228   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3229   for arguments passed in registers.  If nonzero, it will be the number
3230   of bytes required.  */
3231
3232void
3233emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3234		args_addr, args_so_far, reg_parm_stack_space,
3235                alignment_pad)
3236     rtx x;
3237     enum machine_mode mode;
3238     tree type;
3239     rtx size;
3240     unsigned int align;
3241     int partial;
3242     rtx reg;
3243     int extra;
3244     rtx args_addr;
3245     rtx args_so_far;
3246     int reg_parm_stack_space;
3247     rtx alignment_pad;
3248{
3249  rtx xinner;
3250  enum direction stack_direction
3251#ifdef STACK_GROWS_DOWNWARD
3252    = downward;
3253#else
3254    = upward;
3255#endif
3256
3257  /* Decide where to pad the argument: `downward' for below,
3258     `upward' for above, or `none' for don't pad it.
3259     Default is below for small data on big-endian machines; else above.  */
3260  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3261
3262  /* Invert direction if stack is post-decrement.
3263     FIXME: why?  */
3264  if (STACK_PUSH_CODE == POST_DEC)
3265    if (where_pad != none)
3266      where_pad = (where_pad == downward ? upward : downward);
3267
3268  xinner = x = protect_from_queue (x, 0);
3269
3270  if (mode == BLKmode)
3271    {
3272      /* Copy a block into the stack, entirely or partially.  */
3273
3274      rtx temp;
3275      int used = partial * UNITS_PER_WORD;
3276      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3277      int skip;
3278
3279      if (size == 0)
3280	abort ();
3281
3282      used -= offset;
3283
3284      /* USED is now the # of bytes we need not copy to the stack
3285	 because registers will take care of them.  */
3286
3287      if (partial != 0)
3288	xinner = adjust_address (xinner, BLKmode, used);
3289
3290      /* If the partial register-part of the arg counts in its stack size,
3291	 skip the part of stack space corresponding to the registers.
3292	 Otherwise, start copying to the beginning of the stack space,
3293	 by setting SKIP to 0.  */
3294      skip = (reg_parm_stack_space == 0) ? 0 : used;
3295
3296#ifdef PUSH_ROUNDING
3297      /* Do it with several push insns if that doesn't take lots of insns
3298	 and if there is no difficulty with push insns that skip bytes
3299	 on the stack for alignment purposes.  */
3300      if (args_addr == 0
3301	  && PUSH_ARGS
3302	  && GET_CODE (size) == CONST_INT
3303	  && skip == 0
3304	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3305	  /* Here we avoid the case of a structure whose weak alignment
3306	     forces many pushes of a small amount of data,
3307	     and such small pushes do rounding that causes trouble.  */
3308	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3309	      || align >= BIGGEST_ALIGNMENT
3310	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3311		  == (align / BITS_PER_UNIT)))
3312	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3313	{
3314	  /* Push padding now if padding above and stack grows down,
3315	     or if padding below and stack grows up.
3316	     But if space already allocated, this has already been done.  */
3317	  if (extra && args_addr == 0
3318	      && where_pad != none && where_pad != stack_direction)
3319	    anti_adjust_stack (GEN_INT (extra));
3320
3321	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3322	}
3323      else
3324#endif /* PUSH_ROUNDING  */
3325	{
3326	  rtx target;
3327
3328	  /* Otherwise make space on the stack and copy the data
3329	     to the address of that space.  */
3330
3331	  /* Deduct words put into registers from the size we must copy.  */
3332	  if (partial != 0)
3333	    {
3334	      if (GET_CODE (size) == CONST_INT)
3335		size = GEN_INT (INTVAL (size) - used);
3336	      else
3337		size = expand_binop (GET_MODE (size), sub_optab, size,
3338				     GEN_INT (used), NULL_RTX, 0,
3339				     OPTAB_LIB_WIDEN);
3340	    }
3341
3342	  /* Get the address of the stack space.
3343	     In this case, we do not deal with EXTRA separately.
3344	     A single stack adjust will do.  */
3345	  if (! args_addr)
3346	    {
3347	      temp = push_block (size, extra, where_pad == downward);
3348	      extra = 0;
3349	    }
3350	  else if (GET_CODE (args_so_far) == CONST_INT)
3351	    temp = memory_address (BLKmode,
3352				   plus_constant (args_addr,
3353						  skip + INTVAL (args_so_far)));
3354	  else
3355	    temp = memory_address (BLKmode,
3356				   plus_constant (gen_rtx_PLUS (Pmode,
3357								args_addr,
3358								args_so_far),
3359						  skip));
3360	  target = gen_rtx_MEM (BLKmode, temp);
3361
3362	  if (type != 0)
3363	    {
3364	      set_mem_attributes (target, type, 1);
3365	      /* Function incoming arguments may overlap with sibling call
3366		 outgoing arguments and we cannot allow reordering of reads
3367		 from function arguments with stores to outgoing arguments
3368		 of sibling calls.  */
3369	      set_mem_alias_set (target, 0);
3370	    }
3371	  else
3372	    set_mem_align (target, align);
3373
3374	  /* TEMP is the address of the block.  Copy the data there.  */
3375	  if (GET_CODE (size) == CONST_INT
3376	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3377	    {
3378	      move_by_pieces (target, xinner, INTVAL (size), align);
3379	      goto ret;
3380	    }
3381	  else
3382	    {
3383	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3384	      enum machine_mode mode;
3385
3386	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3387		   mode != VOIDmode;
3388		   mode = GET_MODE_WIDER_MODE (mode))
3389		{
3390		  enum insn_code code = movstr_optab[(int) mode];
3391		  insn_operand_predicate_fn pred;
3392
3393		  if (code != CODE_FOR_nothing
3394		      && ((GET_CODE (size) == CONST_INT
3395			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3396			       <= (GET_MODE_MASK (mode) >> 1)))
3397			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3398		      && (!(pred = insn_data[(int) code].operand[0].predicate)
3399			  || ((*pred) (target, BLKmode)))
3400		      && (!(pred = insn_data[(int) code].operand[1].predicate)
3401			  || ((*pred) (xinner, BLKmode)))
3402		      && (!(pred = insn_data[(int) code].operand[3].predicate)
3403			  || ((*pred) (opalign, VOIDmode))))
3404		    {
3405		      rtx op2 = convert_to_mode (mode, size, 1);
3406		      rtx last = get_last_insn ();
3407		      rtx pat;
3408
3409		      pred = insn_data[(int) code].operand[2].predicate;
3410		      if (pred != 0 && ! (*pred) (op2, mode))
3411			op2 = copy_to_mode_reg (mode, op2);
3412
3413		      pat = GEN_FCN ((int) code) (target, xinner,
3414						  op2, opalign);
3415		      if (pat)
3416			{
3417			  emit_insn (pat);
3418			  goto ret;
3419			}
3420		      else
3421			delete_insns_since (last);
3422		    }
3423		}
3424	    }
3425
3426	  if (!ACCUMULATE_OUTGOING_ARGS)
3427	    {
3428	      /* If the source is referenced relative to the stack pointer,
3429		 copy it to another register to stabilize it.  We do not need
3430		 to do this if we know that we won't be changing sp.  */
3431
3432	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3433		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3434		temp = copy_to_reg (temp);
3435	    }
3436
3437	  /* Make inhibit_defer_pop nonzero around the library call
3438	     to force it to pop the bcopy-arguments right away.  */
3439	  NO_DEFER_POP;
3440#ifdef TARGET_MEM_FUNCTIONS
3441	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
3442			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3443			     convert_to_mode (TYPE_MODE (sizetype),
3444					      size, TREE_UNSIGNED (sizetype)),
3445			     TYPE_MODE (sizetype));
3446#else
3447	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
3448			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3449			     convert_to_mode (TYPE_MODE (integer_type_node),
3450					      size,
3451					      TREE_UNSIGNED (integer_type_node)),
3452			     TYPE_MODE (integer_type_node));
3453#endif
3454	  OK_DEFER_POP;
3455	}
3456    }
3457  else if (partial > 0)
3458    {
3459      /* Scalar partly in registers.  */
3460
3461      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3462      int i;
3463      int not_stack;
3464      /* # words of start of argument
3465	 that we must make space for but need not store.  */
3466      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3467      int args_offset = INTVAL (args_so_far);
3468      int skip;
3469
3470      /* Push padding now if padding above and stack grows down,
3471	 or if padding below and stack grows up.
3472	 But if space already allocated, this has already been done.  */
3473      if (extra && args_addr == 0
3474	  && where_pad != none && where_pad != stack_direction)
3475	anti_adjust_stack (GEN_INT (extra));
3476
3477      /* If we make space by pushing it, we might as well push
3478	 the real data.  Otherwise, we can leave OFFSET nonzero
3479	 and leave the space uninitialized.  */
3480      if (args_addr == 0)
3481	offset = 0;
3482
3483      /* Now NOT_STACK gets the number of words that we don't need to
3484	 allocate on the stack.  */
3485      not_stack = partial - offset;
3486
3487      /* If the partial register-part of the arg counts in its stack size,
3488	 skip the part of stack space corresponding to the registers.
3489	 Otherwise, start copying to the beginning of the stack space,
3490	 by setting SKIP to 0.  */
3491      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3492
3493      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3494	x = validize_mem (force_const_mem (mode, x));
3495
3496      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3497	 SUBREGs of such registers are not allowed.  */
3498      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3499	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3500	x = copy_to_reg (x);
3501
3502      /* Loop over all the words allocated on the stack for this arg.  */
3503      /* We can do it by words, because any scalar bigger than a word
3504	 has a size a multiple of a word.  */
3505#ifndef PUSH_ARGS_REVERSED
3506      for (i = not_stack; i < size; i++)
3507#else
3508      for (i = size - 1; i >= not_stack; i--)
3509#endif
3510	if (i >= not_stack + offset)
3511	  emit_push_insn (operand_subword_force (x, i, mode),
3512			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3513			  0, args_addr,
3514			  GEN_INT (args_offset + ((i - not_stack + skip)
3515						  * UNITS_PER_WORD)),
3516			  reg_parm_stack_space, alignment_pad);
3517    }
3518  else
3519    {
3520      rtx addr;
3521      rtx target = NULL_RTX;
3522      rtx dest;
3523
3524      /* Push padding now if padding above and stack grows down,
3525	 or if padding below and stack grows up.
3526	 But if space already allocated, this has already been done.  */
3527      if (extra && args_addr == 0
3528	  && where_pad != none && where_pad != stack_direction)
3529	anti_adjust_stack (GEN_INT (extra));
3530
3531#ifdef PUSH_ROUNDING
3532      if (args_addr == 0 && PUSH_ARGS)
3533	emit_single_push_insn (mode, x, type);
3534      else
3535#endif
3536	{
3537	  if (GET_CODE (args_so_far) == CONST_INT)
3538	    addr
3539	      = memory_address (mode,
3540				plus_constant (args_addr,
3541					       INTVAL (args_so_far)));
3542	  else
3543	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3544						       args_so_far));
3545	  target = addr;
3546	  dest = gen_rtx_MEM (mode, addr);
3547	  if (type != 0)
3548	    {
3549	      set_mem_attributes (dest, type, 1);
3550	      /* Function incoming arguments may overlap with sibling call
3551		 outgoing arguments and we cannot allow reordering of reads
3552		 from function arguments with stores to outgoing arguments
3553		 of sibling calls.  */
3554	      set_mem_alias_set (dest, 0);
3555	    }
3556
3557	  emit_move_insn (dest, x);
3558	}
3559
3560    }
3561
3562 ret:
3563  /* If part should go in registers, copy that part
3564     into the appropriate registers.  Do this now, at the end,
3565     since mem-to-mem copies above may do function calls.  */
3566  if (partial > 0 && reg != 0)
3567    {
3568      /* Handle calls that pass values in multiple non-contiguous locations.
3569	 The Irix 6 ABI has examples of this.  */
3570      if (GET_CODE (reg) == PARALLEL)
3571	emit_group_load (reg, x, -1);  /* ??? size? */
3572      else
3573	move_block_to_reg (REGNO (reg), x, partial, mode);
3574    }
3575
3576  if (extra && args_addr == 0 && where_pad == stack_direction)
3577    anti_adjust_stack (GEN_INT (extra));
3578
3579  if (alignment_pad && args_addr == 0)
3580    anti_adjust_stack (alignment_pad);
3581}
3582
3583/* Return X if X can be used as a subtarget in a sequence of arithmetic
3584   operations.  */
3585
3586static rtx
3587get_subtarget (x)
3588     rtx x;
3589{
3590  return ((x == 0
3591	   /* Only registers can be subtargets.  */
3592	   || GET_CODE (x) != REG
3593	   /* If the register is readonly, it can't be set more than once.  */
3594	   || RTX_UNCHANGING_P (x)
3595	   /* Don't use hard regs to avoid extending their life.  */
3596	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3597	   /* Avoid subtargets inside loops,
3598	      since they hide some invariant expressions.  */
3599	   || preserve_subexpressions_p ())
3600	  ? 0 : x);
3601}
3602
3603/* Expand an assignment that stores the value of FROM into TO.
3604   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3605   (This may contain a QUEUED rtx;
3606   if the value is constant, this rtx is a constant.)
3607   Otherwise, the returned value is NULL_RTX.
3608
3609   SUGGEST_REG is no longer actually used.
3610   It used to mean, copy the value through a register
3611   and return that register, if that is possible.
3612   We now use WANT_VALUE to decide whether to do this.  */
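	/* For instance, in a chained assignment such as "a = b = c", the
	   inner assignment is typically expanded with WANT_VALUE nonzero
	   so its value can feed the outer assignment without being
	   recomputed.  */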
3613
3614rtx
3615expand_assignment (to, from, want_value, suggest_reg)
3616     tree to, from;
3617     int want_value;
3618     int suggest_reg ATTRIBUTE_UNUSED;
3619{
3620  rtx to_rtx = 0;
3621  rtx result;
3622
3623  /* Don't crash if the lhs of the assignment was erroneous.  */
3624
3625  if (TREE_CODE (to) == ERROR_MARK)
3626    {
3627      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3628      return want_value ? result : NULL_RTX;
3629    }
3630
3631  /* Assignment of a structure component needs special treatment
3632     if the structure component's rtx is not simply a MEM.
3633     Assignment of an array element at a constant index, and assignment of
3634     an array element in an unaligned packed structure field, has the same
3635     problem.  */
3636
3637  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3638      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3639    {
3640      enum machine_mode mode1;
3641      HOST_WIDE_INT bitsize, bitpos;
3642      rtx orig_to_rtx;
3643      tree offset;
3644      int unsignedp;
3645      int volatilep = 0;
3646      tree tem;
3647
3648      push_temp_slots ();
3649      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3650				 &unsignedp, &volatilep);
3651
3652      /* If we are going to use store_bit_field and extract_bit_field,
3653	 make sure to_rtx will be safe for multiple use.  */
3654
3655      if (mode1 == VOIDmode && want_value)
3656	tem = stabilize_reference (tem);
3657
3658      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3659
3660      if (offset != 0)
3661	{
3662	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3663
3664	  if (GET_CODE (to_rtx) != MEM)
3665	    abort ();
3666
3667	  if (GET_MODE (offset_rtx) != ptr_mode)
3668	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3669
3670#ifdef POINTERS_EXTEND_UNSIGNED
3671	  if (GET_MODE (offset_rtx) != Pmode)
3672	    offset_rtx = convert_memory_address (Pmode, offset_rtx);
3673#endif
3674
3675	  /* A constant address in TO_RTX can have VOIDmode, we must not try
3676	     to call force_reg for that case.  Avoid that case.  */
3677	  if (GET_CODE (to_rtx) == MEM
3678	      && GET_MODE (to_rtx) == BLKmode
3679	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3680	      && bitsize > 0
3681	      && (bitpos % bitsize) == 0
3682	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3683	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3684	    {
3685	      rtx temp
3686		= adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3687
3688	      if (GET_CODE (XEXP (temp, 0)) == REG)
3689	        to_rtx = temp;
3690	      else
3691		to_rtx = (replace_equiv_address
3692			  (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3693					      XEXP (temp, 0))));
3694	      bitpos = 0;
3695	    }
3696
3697	  to_rtx = offset_address (to_rtx, offset_rtx,
3698				   highest_pow2_factor (offset));
3699	}
3700
3701      if (GET_CODE (to_rtx) == MEM)
3702	{
3703	  tree old_expr = MEM_EXPR (to_rtx);
3704
3705	  /* If the field is at offset zero, we could have been given the
3706	     DECL_RTX of the parent struct.  Don't munge it.  */
3707	  to_rtx = shallow_copy_rtx (to_rtx);
3708
3709	  set_mem_attributes (to_rtx, to, 0);
3710
3711	  /* If we changed MEM_EXPR, that means we're now referencing
3712	     the COMPONENT_REF, which means that MEM_OFFSET must be
3713	     relative to that field.  But we've not yet reflected BITPOS
3714	     in TO_RTX.  This will be done in store_field.  Adjust for
3715	     that by biasing MEM_OFFSET by -bitpos.  */
3716	  if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3717	      && (bitpos / BITS_PER_UNIT) != 0)
3718	    set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3719					     - (bitpos / BITS_PER_UNIT)));
3720	}
3721
3722      /* Deal with volatile and readonly fields.  The former is only done
3723	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
3724      if (volatilep && GET_CODE (to_rtx) == MEM)
3725	{
3726	  if (to_rtx == orig_to_rtx)
3727	    to_rtx = copy_rtx (to_rtx);
3728	  MEM_VOLATILE_P (to_rtx) = 1;
3729	}
3730
3731      if (TREE_CODE (to) == COMPONENT_REF
3732	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3733	{
3734	  if (to_rtx == orig_to_rtx)
3735	    to_rtx = copy_rtx (to_rtx);
3736	  RTX_UNCHANGING_P (to_rtx) = 1;
3737	}
3738
3739      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3740	{
3741	  if (to_rtx == orig_to_rtx)
3742	    to_rtx = copy_rtx (to_rtx);
3743	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3744	}
3745
3746      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3747			    (want_value
3748			     /* Spurious cast for HPUX compiler.  */
3749			     ? ((enum machine_mode)
3750				TYPE_MODE (TREE_TYPE (to)))
3751			     : VOIDmode),
3752			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
3753
3754      preserve_temp_slots (result);
3755      free_temp_slots ();
3756      pop_temp_slots ();
3757
3758      /* If the value is meaningful, convert RESULT to the proper mode.
3759	 Otherwise, return nothing.  */
3760      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3761					  TYPE_MODE (TREE_TYPE (from)),
3762					  result,
3763					  TREE_UNSIGNED (TREE_TYPE (to)))
3764	      : NULL_RTX);
3765    }
3766
3767  /* If the rhs is a function call and its value is not an aggregate,
3768     call the function before we start to compute the lhs.
3769     This is needed for correct code for cases such as
3770     val = setjmp (buf) on machines where reference to val
3771     requires loading up part of an address in a separate insn.
3772
3773     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3774     since it might be a promoted variable where the zero- or sign- extension
3775     needs to be done.  Handling this in the normal way is safe because no
3776     computation is done before the call.  */
3777  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3778      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3779      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3780	    && GET_CODE (DECL_RTL (to)) == REG))
3781    {
3782      rtx value;
3783
3784      push_temp_slots ();
3785      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3786      if (to_rtx == 0)
3787	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3788
3789      /* Handle calls that return values in multiple non-contiguous locations.
3790	 The Irix 6 ABI has examples of this.  */
3791      if (GET_CODE (to_rtx) == PARALLEL)
3792	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3793      else if (GET_MODE (to_rtx) == BLKmode)
3794	emit_block_move (to_rtx, value, expr_size (from));
3795      else
3796	{
3797#ifdef POINTERS_EXTEND_UNSIGNED
3798	  if (POINTER_TYPE_P (TREE_TYPE (to))
3799	      && GET_MODE (to_rtx) != GET_MODE (value))
3800	    value = convert_memory_address (GET_MODE (to_rtx), value);
3801#endif
3802	  emit_move_insn (to_rtx, value);
3803	}
3804      preserve_temp_slots (to_rtx);
3805      free_temp_slots ();
3806      pop_temp_slots ();
3807      return want_value ? to_rtx : NULL_RTX;
3808    }
3809
3810  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3811     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3812
3813  if (to_rtx == 0)
3814    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3815
3816  /* Don't move directly into a return register.  */
3817  if (TREE_CODE (to) == RESULT_DECL
3818      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3819    {
3820      rtx temp;
3821
3822      push_temp_slots ();
3823      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3824
3825      if (GET_CODE (to_rtx) == PARALLEL)
3826	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3827      else
3828	emit_move_insn (to_rtx, temp);
3829
3830      preserve_temp_slots (to_rtx);
3831      free_temp_slots ();
3832      pop_temp_slots ();
3833      return want_value ? to_rtx : NULL_RTX;
3834    }
3835
3836  /* In case we are returning the contents of an object which overlaps
3837     the place the value is being stored, use a safe function when copying
3838     a value through a pointer into a structure value return block.  */
3839  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3840      && current_function_returns_struct
3841      && !current_function_returns_pcc_struct)
3842    {
3843      rtx from_rtx, size;
3844
3845      push_temp_slots ();
3846      size = expr_size (from);
3847      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3848
3849#ifdef TARGET_MEM_FUNCTIONS
3850      emit_library_call (memmove_libfunc, LCT_NORMAL,
3851			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3852			 XEXP (from_rtx, 0), Pmode,
3853			 convert_to_mode (TYPE_MODE (sizetype),
3854					  size, TREE_UNSIGNED (sizetype)),
3855			 TYPE_MODE (sizetype));
3856#else
3857      emit_library_call (bcopy_libfunc, LCT_NORMAL,
3858			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3859			 XEXP (to_rtx, 0), Pmode,
3860			 convert_to_mode (TYPE_MODE (integer_type_node),
3861					  size, TREE_UNSIGNED (integer_type_node)),
3862			 TYPE_MODE (integer_type_node));
3863#endif
3864
3865      preserve_temp_slots (to_rtx);
3866      free_temp_slots ();
3867      pop_temp_slots ();
3868      return want_value ? to_rtx : NULL_RTX;
3869    }
3870
3871  /* Compute FROM and store the value in the rtx we got.  */
3872
3873  push_temp_slots ();
3874  result = store_expr (from, to_rtx, want_value);
3875  preserve_temp_slots (result);
3876  free_temp_slots ();
3877  pop_temp_slots ();
3878  return want_value ? result : NULL_RTX;
3879}
3880
3881/* Generate code for computing expression EXP,
3882   and storing the value into TARGET.
3883   TARGET may contain a QUEUED rtx.
3884
3885   If WANT_VALUE is nonzero, return a copy of the value
3886   not in TARGET, so that we can be sure to use the proper
3887   value in a containing expression even if TARGET has something
3888   else stored in it.  If possible, we copy the value through a pseudo
3889   and return that pseudo.  Or, if the value is constant, we try to
3890   return the constant.  In some cases, we return a pseudo
3891   copied *from* TARGET.
3892
3893   If the mode is BLKmode then we may return TARGET itself.
3894	   It turns out that in BLKmode it doesn't cause a problem,
3895	   because C has no operators that could combine two different
3896   assignments into the same BLKmode object with different values
3897   with no sequence point.  Will other languages need this to
3898   be more thorough?
3899
3900   If WANT_VALUE is 0, we return NULL, to make sure
3901   to catch quickly any cases where the caller uses the value
3902   and fails to set WANT_VALUE.  */
3903
3904rtx
3905store_expr (exp, target, want_value)
3906     tree exp;
3907     rtx target;
3908     int want_value;
3909{
3910  rtx temp;
3911  int dont_return_target = 0;
3912  int dont_store_target = 0;
3913
3914  if (TREE_CODE (exp) == COMPOUND_EXPR)
3915    {
3916      /* Perform first part of compound expression, then assign from second
3917	 part.  */
3918      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3919      emit_queue ();
3920      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3921    }
3922  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3923    {
3924      /* For conditional expression, get safe form of the target.  Then
3925	 test the condition, doing the appropriate assignment on either
3926	 side.  This avoids the creation of unnecessary temporaries.
3927	 For non-BLKmode, it is more efficient not to do this.  */
3928
3929      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3930
3931      emit_queue ();
3932      target = protect_from_queue (target, 1);
3933
3934      do_pending_stack_adjust ();
3935      NO_DEFER_POP;
3936      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3937      start_cleanup_deferral ();
3938      store_expr (TREE_OPERAND (exp, 1), target, 0);
3939      end_cleanup_deferral ();
3940      emit_queue ();
3941      emit_jump_insn (gen_jump (lab2));
3942      emit_barrier ();
3943      emit_label (lab1);
3944      start_cleanup_deferral ();
3945      store_expr (TREE_OPERAND (exp, 2), target, 0);
3946      end_cleanup_deferral ();
3947      emit_queue ();
3948      emit_label (lab2);
3949      OK_DEFER_POP;
3950
3951      return want_value ? target : NULL_RTX;
3952    }
3953  else if (queued_subexp_p (target))
3954    /* If target contains a postincrement, let's not risk
3955       using it as the place to generate the rhs.  */
3956    {
3957      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3958	{
3959	  /* Expand EXP into a new pseudo.  */
3960	  temp = gen_reg_rtx (GET_MODE (target));
3961	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3962	}
3963      else
3964	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3965
3966      /* If target is volatile, ANSI requires accessing the value
3967	 *from* the target, if it is accessed.  So make that happen.
3968	 In no case return the target itself.  */
3969      if (! MEM_VOLATILE_P (target) && want_value)
3970	dont_return_target = 1;
3971    }
3972  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3973	   && GET_MODE (target) != BLKmode)
3974    /* If target is in memory and caller wants value in a register instead,
3975       arrange that.  Pass TARGET as target for expand_expr so that,
3976       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3977       We know expand_expr will not use the target in that case.
3978       Don't do this if TARGET is volatile because we are supposed
3979       to write it and then read it.  */
3980    {
3981      temp = expand_expr (exp, target, GET_MODE (target), 0);
3982      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3983	{
3984	  /* If TEMP is already in the desired TARGET, only copy it from
3985	     memory and don't store it there again.  */
3986	  if (temp == target
3987	      || (rtx_equal_p (temp, target)
3988		  && ! side_effects_p (temp) && ! side_effects_p (target)))
3989	    dont_store_target = 1;
3990	  temp = copy_to_reg (temp);
3991	}
3992      dont_return_target = 1;
3993    }
3994  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3995	    /* If this is a scalar in a register that is stored in a wider mode
3996       than the declared mode, compute the result into its declared mode
3997       and then convert to the wider mode.  Our value is the computed
3998       expression.  */
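	    /* For example, on a target whose PROMOTE_MODE widens HImode
	       values, a "short" variable may live in an SImode pseudo;
	       EXP is then computed in HImode and extended into the wider
	       register below.  */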
3999    {
4000      /* If we don't want a value, we can do the conversion inside EXP,
4001	 which will often result in some optimizations.  Do the conversion
4002	 in two steps: first change the signedness, if needed, then
4003	 the extend.  But don't do this if the type of EXP is a subtype
4004	 of something else since then the conversion might involve
4005	 more than just converting modes.  */
4006      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4007	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4008	{
4009	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4010	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4011	    exp
4012	      = convert
4013		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4014					  TREE_TYPE (exp)),
4015		 exp);
4016
4017	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4018					SUBREG_PROMOTED_UNSIGNED_P (target)),
4019			 exp);
4020	}
4021
4022      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4023
4024      /* If TEMP is a volatile MEM and we want a result value, make
4025	 the access now so it gets done only once.  Likewise if
4026	 it contains TARGET.  */
4027      if (GET_CODE (temp) == MEM && want_value
4028	  && (MEM_VOLATILE_P (temp)
4029	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4030	temp = copy_to_reg (temp);
4031
4032      /* If TEMP is a VOIDmode constant, use convert_modes to make
4033	 sure that we properly convert it.  */
4034      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4035	{
4036	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4037				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4038	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4039			        GET_MODE (target), temp,
4040			        SUBREG_PROMOTED_UNSIGNED_P (target));
4041	}
4042
4043      convert_move (SUBREG_REG (target), temp,
4044		    SUBREG_PROMOTED_UNSIGNED_P (target));
4045
4046      /* If we promoted a constant, change the mode back down to match
4047	 target.  Otherwise, the caller might get confused by a result whose
4048	 mode is larger than expected.  */
4049
4050      if (want_value && GET_MODE (temp) != GET_MODE (target))
4051	{
4052	  if (GET_MODE (temp) != VOIDmode)
4053	    {
4054	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4055	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4056	      SUBREG_PROMOTED_UNSIGNED_P (temp)
4057		= SUBREG_PROMOTED_UNSIGNED_P (target);
4058	    }
4059	  else
4060	    temp = convert_modes (GET_MODE (target),
4061				  GET_MODE (SUBREG_REG (target)),
4062				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4063	}
4064
4065      return want_value ? temp : NULL_RTX;
4066    }
4067  else
4068    {
4069      temp = expand_expr (exp, target, GET_MODE (target), 0);
4070      /* Return TARGET if it's a specified hardware register.
4071	 If TARGET is a volatile mem ref, either return TARGET
4072	 or return a reg copied *from* TARGET; ANSI requires this.
4073
4074	 Otherwise, if TEMP is not TARGET, return TEMP
4075	 if it is constant (for efficiency),
4076	 or if we really want the correct value.  */
4077      if (!(target && GET_CODE (target) == REG
4078	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4079	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4080	  && ! rtx_equal_p (temp, target)
4081	  && (CONSTANT_P (temp) || want_value))
4082	dont_return_target = 1;
4083    }
4084
4085  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4086     the same as that of TARGET, adjust the constant.  This is needed, for
4087     example, in case it is a CONST_DOUBLE and we want only a word-sized
4088     value.  */
4089  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4090      && TREE_CODE (exp) != ERROR_MARK
4091      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4092    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4093			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4094
4095  /* If value was not generated in the target, store it there.
4096     Convert the value to TARGET's type first if necessary.
4097     If TEMP and TARGET compare equal according to rtx_equal_p, but
4098     one or both of them are volatile memory refs, we have to distinguish
4099     two cases:
4100     - expand_expr has used TARGET.  In this case, we must not generate
4101       another copy.  This can be detected by TARGET being equal according
4102       to == .
4103     - expand_expr has not used TARGET - that means that the source just
4104       happens to have the same RTX form.  Since temp will have been created
4105       by expand_expr, it will compare unequal according to == .
4106       We must generate a copy in this case, to reach the correct number
4107       of volatile memory references.  */
4108
4109  if ((! rtx_equal_p (temp, target)
4110       || (temp != target && (side_effects_p (temp)
4111			      || side_effects_p (target))))
4112      && TREE_CODE (exp) != ERROR_MARK
4113      && ! dont_store_target)
4114    {
4115      target = protect_from_queue (target, 1);
4116      if (GET_MODE (temp) != GET_MODE (target)
4117	  && GET_MODE (temp) != VOIDmode)
4118	{
4119	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4120	  if (dont_return_target)
4121	    {
4122	      /* In this case, we will return TEMP,
4123		 so make sure it has the proper mode.
4124		 But don't forget to store the value into TARGET.  */
4125	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4126	      emit_move_insn (target, temp);
4127	    }
4128	  else
4129	    convert_move (target, temp, unsignedp);
4130	}
4131
4132      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4133	{
4134	  /* Handle copying a string constant into an array.  The string
4135	     constant may be shorter than the array.  So copy just the string's
4136	     actual length, and clear the rest.  First get the size of the data
4137	     type of the string, which is actually the size of the target.  */
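	  /* For instance, initializing "char buf[10]" from the string
	     "abc" supplies four bytes (including the terminating null),
	     so four bytes are block-copied and the remaining six are
	     cleared below.  */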
4138	  rtx size = expr_size (exp);
4139
4140	  if (GET_CODE (size) == CONST_INT
4141	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4142	    emit_block_move (target, temp, size);
4143	  else
4144	    {
4145	      /* Compute the size of the data to copy from the string.  */
4146	      tree copy_size
4147		= size_binop (MIN_EXPR,
4148			      make_tree (sizetype, size),
4149			      size_int (TREE_STRING_LENGTH (exp)));
4150	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4151					       VOIDmode, 0);
4152	      rtx label = 0;
4153
4154	      /* Copy that much.  */
4155	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4156	      emit_block_move (target, temp, copy_size_rtx);
4157
4158	      /* Figure out how much is left in TARGET that we have to clear.
4159		 Do all calculations in ptr_mode.  */
4160	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4161		{
4162		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4163		  target = adjust_address (target, BLKmode,
4164					   INTVAL (copy_size_rtx));
4165		}
4166	      else
4167		{
4168		  size = expand_binop (ptr_mode, sub_optab, size,
4169				       copy_size_rtx, NULL_RTX, 0,
4170				       OPTAB_LIB_WIDEN);
4171
4172#ifdef POINTERS_EXTEND_UNSIGNED
4173		  if (GET_MODE (copy_size_rtx) != Pmode)
4174		    copy_size_rtx = convert_memory_address (Pmode,
4175							    copy_size_rtx);
4176#endif
4177
4178		  target = offset_address (target, copy_size_rtx,
4179					   highest_pow2_factor (copy_size));
4180		  label = gen_label_rtx ();
4181		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4182					   GET_MODE (size), 0, label);
4183		}
4184
4185	      if (size != const0_rtx)
4186		clear_storage (target, size);
4187
4188	      if (label)
4189		emit_label (label);
4190	    }
4191	}
4192      /* Handle calls that return values in multiple non-contiguous locations.
4193	 The Irix 6 ABI has examples of this.  */
4194      else if (GET_CODE (target) == PARALLEL)
4195	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4196      else if (GET_MODE (temp) == BLKmode)
4197	emit_block_move (target, temp, expr_size (exp));
4198      else
4199	emit_move_insn (target, temp);
4200    }
4201
4202  /* If we don't want a value, return NULL_RTX.  */
4203  if (! want_value)
4204    return NULL_RTX;
4205
4206  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4207     ??? The latter test doesn't seem to make sense.  */
4208  else if (dont_return_target && GET_CODE (temp) != MEM)
4209    return temp;
4210
4211  /* Return TARGET itself if it is a hard register.  */
4212  else if (want_value && GET_MODE (target) != BLKmode
4213	   && ! (GET_CODE (target) == REG
4214		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4215    return copy_to_reg (target);
4216
4217  else
4218    return target;
4219}
4220
4221/* Return 1 if EXP just contains zeros.  */
4222
4223static int
4224is_zeros_p (exp)
4225     tree exp;
4226{
4227  tree elt;
4228
4229  switch (TREE_CODE (exp))
4230    {
4231    case CONVERT_EXPR:
4232    case NOP_EXPR:
4233    case NON_LVALUE_EXPR:
4234    case VIEW_CONVERT_EXPR:
4235      return is_zeros_p (TREE_OPERAND (exp, 0));
4236
4237    case INTEGER_CST:
4238      return integer_zerop (exp);
4239
4240    case COMPLEX_CST:
4241      return
4242	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4243
4244    case REAL_CST:
4245      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4246
4247    case CONSTRUCTOR:
4248      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4249	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4250      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4251	if (! is_zeros_p (TREE_VALUE (elt)))
4252	  return 0;
4253
4254      return 1;
4255
4256    default:
4257      return 0;
4258    }
4259}
4260
4261	/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
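	/* For example, a constructor with 8 elements counts as mostly zero
	   once at least 6 of them are (mostly) zero, since 4 * 6 >= 3 * 8.  */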
4262
4263static int
4264mostly_zeros_p (exp)
4265     tree exp;
4266{
4267  if (TREE_CODE (exp) == CONSTRUCTOR)
4268    {
4269      int elts = 0, zeros = 0;
4270      tree elt = CONSTRUCTOR_ELTS (exp);
4271      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4272	{
4273	  /* If there are no ranges of true bits, it is all zero.  */
4274	  return elt == NULL_TREE;
4275	}
4276      for (; elt; elt = TREE_CHAIN (elt))
4277	{
4278	  /* We do not handle the case where the index is a RANGE_EXPR,
4279	     so the statistic will be somewhat inaccurate.
4280	     We do make a more accurate count in store_constructor itself,
4281	     and since this function is only used for nested array elements,
4282	     this should be close enough.  */
4283	  if (mostly_zeros_p (TREE_VALUE (elt)))
4284	    zeros++;
4285	  elts++;
4286	}
4287
4288      return 4 * zeros >= 3 * elts;
4289    }
4290
4291  return is_zeros_p (exp);
4292}
4293
4294/* Helper function for store_constructor.
4295   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4296   TYPE is the type of the CONSTRUCTOR, not the element type.
4297   CLEARED is as for store_constructor.
4298   ALIAS_SET is the alias set to use for any stores.
4299
4300   This provides a recursive shortcut back to store_constructor when it isn't
4301   necessary to go through store_field.  This is so that we can pass through
4302   the cleared field to let store_constructor know that we may not have to
4303   clear a substructure if the outer structure has already been cleared.  */
4304
4305static void
4306store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4307			 alias_set)
4308     rtx target;
4309     unsigned HOST_WIDE_INT bitsize;
4310     HOST_WIDE_INT bitpos;
4311     enum machine_mode mode;
4312     tree exp, type;
4313     int cleared;
4314     int alias_set;
4315{
4316  if (TREE_CODE (exp) == CONSTRUCTOR
4317      && bitpos % BITS_PER_UNIT == 0
4318      /* If we have a non-zero bitpos for a register target, then we just
4319	 let store_field do the bitfield handling.  This is unlikely to
4320	 generate unnecessary clear instructions anyways.  */
4321      && (bitpos == 0 || GET_CODE (target) == MEM))
4322    {
4323      if (GET_CODE (target) == MEM)
4324	target
4325	  = adjust_address (target,
4326			    GET_MODE (target) == BLKmode
4327			    || 0 != (bitpos
4328				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4329			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4330
4331
4332      /* Update the alias set, if required.  */
4333      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4334	  && MEM_ALIAS_SET (target) != 0)
4335	{
4336	  target = copy_rtx (target);
4337	  set_mem_alias_set (target, alias_set);
4338	}
4339
4340      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4341    }
4342  else
4343    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4344		 alias_set);
4345}
4346
4347/* Store the value of constructor EXP into the rtx TARGET.
4348   TARGET is either a REG or a MEM; we know it cannot conflict, since
4349   safe_from_p has been called.
4350   CLEARED is true if TARGET is known to have been zero'd.
4351   SIZE is the number of bytes of TARGET we are allowed to modify: this
4352   may not be the same as the size of EXP if we are assigning to a field
4353   which has been packed to exclude padding bits.  */
4354
4355static void
4356store_constructor (exp, target, cleared, size)
4357     tree exp;
4358     rtx target;
4359     int cleared;
4360     HOST_WIDE_INT size;
4361{
4362  tree type = TREE_TYPE (exp);
4363#ifdef WORD_REGISTER_OPERATIONS
4364  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4365#endif
4366
4367  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4368      || TREE_CODE (type) == QUAL_UNION_TYPE)
4369    {
4370      tree elt;
4371
4372      /* We either clear the aggregate or indicate the value is dead.  */
4373      if ((TREE_CODE (type) == UNION_TYPE
4374	   || TREE_CODE (type) == QUAL_UNION_TYPE)
4375	  && ! cleared
4376	  && ! CONSTRUCTOR_ELTS (exp))
4377	/* If the constructor is empty, clear the union.  */
4378	{
4379	  clear_storage (target, expr_size (exp));
4380	  cleared = 1;
4381	}
4382
4383      /* If we are building a static constructor into a register,
4384	 set the initial value as zero so we can fold the value into
4385	 a constant.  But if more than one register is involved,
4386	 this probably loses.  */
4387      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4388	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4389	{
4390	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4391	  cleared = 1;
4392	}
4393
4394      /* If the constructor has fewer fields than the structure
4395	 or if we are initializing the structure to mostly zeros,
4396	 clear the whole structure first.  Don't do this if TARGET is a
4397	 register whose mode size isn't equal to SIZE since clear_storage
4398	 can't handle this case.  */
4399      else if (! cleared && size > 0
4400	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4401		    != fields_length (type))
4402		   || mostly_zeros_p (exp))
4403	       && (GET_CODE (target) != REG
4404		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4405		       == size)))
4406	{
4407	  clear_storage (target, GEN_INT (size));
4408	  cleared = 1;
4409	}
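	      /* For instance, "struct { int a, b, c, d; } x = { 1 };"
		 typically supplies fewer initializer elements than there
		 are fields, so the test above clears the whole structure
		 and only the explicitly initialized field is stored in
		 the loop below.  */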
4410
4411      if (! cleared)
4412	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4413
4414      /* Store each element of the constructor into
4415	 the corresponding field of TARGET.  */
4416
4417      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4418	{
4419	  tree field = TREE_PURPOSE (elt);
4420	  tree value = TREE_VALUE (elt);
4421	  enum machine_mode mode;
4422	  HOST_WIDE_INT bitsize;
4423	  HOST_WIDE_INT bitpos = 0;
4424	  int unsignedp;
4425	  tree offset;
4426	  rtx to_rtx = target;
4427
4428	  /* Just ignore missing fields.
4429	     We cleared the whole structure, above,
4430	     if any fields are missing.  */
4431	  if (field == 0)
4432	    continue;
4433
4434	  if (cleared && is_zeros_p (value))
4435	    continue;
4436
4437	  if (host_integerp (DECL_SIZE (field), 1))
4438	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4439	  else
4440	    bitsize = -1;
4441
4442	  unsignedp = TREE_UNSIGNED (field);
4443	  mode = DECL_MODE (field);
4444	  if (DECL_BIT_FIELD (field))
4445	    mode = VOIDmode;
4446
4447	  offset = DECL_FIELD_OFFSET (field);
4448	  if (host_integerp (offset, 0)
4449	      && host_integerp (bit_position (field), 0))
4450	    {
4451	      bitpos = int_bit_position (field);
4452	      offset = 0;
4453	    }
4454	  else
4455	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4456
4457	  if (offset)
4458	    {
4459	      rtx offset_rtx;
4460
4461	      if (contains_placeholder_p (offset))
4462		offset = build (WITH_RECORD_EXPR, sizetype,
4463				offset, make_tree (TREE_TYPE (exp), target));
4464
4465	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4466	      if (GET_CODE (to_rtx) != MEM)
4467		abort ();
4468
4469	      if (GET_MODE (offset_rtx) != ptr_mode)
4470		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4471
4472#ifdef POINTERS_EXTEND_UNSIGNED
4473	      if (GET_MODE (offset_rtx) != Pmode)
4474		offset_rtx = convert_memory_address (Pmode, offset_rtx);
4475#endif
4476
4477	      to_rtx = offset_address (to_rtx, offset_rtx,
4478				       highest_pow2_factor (offset));
4479	    }
4480
4481	  if (TREE_READONLY (field))
4482	    {
4483	      if (GET_CODE (to_rtx) == MEM)
4484		to_rtx = copy_rtx (to_rtx);
4485
4486	      RTX_UNCHANGING_P (to_rtx) = 1;
4487	    }
4488
4489#ifdef WORD_REGISTER_OPERATIONS
4490	  /* If this initializes a field that is smaller than a word, at the
4491	     start of a word, try to widen it to a full word.
4492	     This special case allows us to output C++ member function
4493	     initializations in a form that the optimizers can understand.  */
4494	  if (GET_CODE (target) == REG
4495	      && bitsize < BITS_PER_WORD
4496	      && bitpos % BITS_PER_WORD == 0
4497	      && GET_MODE_CLASS (mode) == MODE_INT
4498	      && TREE_CODE (value) == INTEGER_CST
4499	      && exp_size >= 0
4500	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4501	    {
4502	      tree type = TREE_TYPE (value);
4503
4504	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4505		{
4506		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4507		  value = convert (type, value);
4508		}
4509
4510	      if (BYTES_BIG_ENDIAN)
4511		value
4512		  = fold (build (LSHIFT_EXPR, type, value,
4513				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4514	      bitsize = BITS_PER_WORD;
4515	      mode = word_mode;
4516	    }
4517#endif
4518
4519	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4520	      && DECL_NONADDRESSABLE_P (field))
4521	    {
4522	      to_rtx = copy_rtx (to_rtx);
4523	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4524	    }
4525
4526	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4527				   value, type, cleared,
4528				   get_alias_set (TREE_TYPE (field)));
4529	}
4530    }
4531  else if (TREE_CODE (type) == ARRAY_TYPE)
4532    {
4533      tree elt;
4534      int i;
4535      int need_to_clear;
4536      tree domain = TYPE_DOMAIN (type);
4537      tree elttype = TREE_TYPE (type);
4538      int const_bounds_p = (TYPE_MIN_VALUE (domain)
4539			    && TYPE_MAX_VALUE (domain)
4540			    && host_integerp (TYPE_MIN_VALUE (domain), 0)
4541			    && host_integerp (TYPE_MAX_VALUE (domain), 0));
4542      HOST_WIDE_INT minelt = 0;
4543      HOST_WIDE_INT maxelt = 0;
4544
4545      /* If we have constant bounds for the range of the type, get them.  */
4546      if (const_bounds_p)
4547	{
4548	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4549	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4550	}
4551
4552      /* If the constructor has fewer elements than the array,
4553         clear the whole array first.  Similarly if this is a
4554         static constructor of a non-BLKmode object.  */
4555      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4556	need_to_clear = 1;
4557      else
4558	{
4559	  HOST_WIDE_INT count = 0, zero_count = 0;
4560	  need_to_clear = ! const_bounds_p;
4561
4562	  /* This loop is a more accurate version of the loop in
4563	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4564	     It is also needed to check for missing elements.  */
4565	  for (elt = CONSTRUCTOR_ELTS (exp);
4566	       elt != NULL_TREE && ! need_to_clear;
4567	       elt = TREE_CHAIN (elt))
4568	    {
4569	      tree index = TREE_PURPOSE (elt);
4570	      HOST_WIDE_INT this_node_count;
4571
4572	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4573		{
4574		  tree lo_index = TREE_OPERAND (index, 0);
4575		  tree hi_index = TREE_OPERAND (index, 1);
4576
4577		  if (! host_integerp (lo_index, 1)
4578		      || ! host_integerp (hi_index, 1))
4579		    {
4580		      need_to_clear = 1;
4581		      break;
4582		    }
4583
4584		  this_node_count = (tree_low_cst (hi_index, 1)
4585				     - tree_low_cst (lo_index, 1) + 1);
4586		}
4587	      else
4588		this_node_count = 1;
4589
4590	      count += this_node_count;
4591	      if (mostly_zeros_p (TREE_VALUE (elt)))
4592		zero_count += this_node_count;
4593	    }
4594
4595	  /* Clear the entire array first if there are any missing elements,
4596	     or if the incidence of zero elements is >= 75%.  */
4597	  if (! need_to_clear
4598	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4599	    need_to_clear = 1;
4600	}
4601
4602      if (need_to_clear && size > 0)
4603	{
4604	  if (! cleared)
4605	    clear_storage (target, GEN_INT (size));
4606	  cleared = 1;
4607	}
4608      else if (REG_P (target))
4609	/* Inform later passes that the old value is dead.  */
4610	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4611
4612      /* Store each element of the constructor into
4613	 the corresponding element of TARGET, determined
4614	 by counting the elements.  */
4615      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4616	   elt;
4617	   elt = TREE_CHAIN (elt), i++)
4618	{
4619	  enum machine_mode mode;
4620	  HOST_WIDE_INT bitsize;
4621	  HOST_WIDE_INT bitpos;
4622	  int unsignedp;
4623	  tree value = TREE_VALUE (elt);
4624	  tree index = TREE_PURPOSE (elt);
4625	  rtx xtarget = target;
4626
4627	  if (cleared && is_zeros_p (value))
4628	    continue;
4629
4630	  unsignedp = TREE_UNSIGNED (elttype);
4631	  mode = TYPE_MODE (elttype);
4632	  if (mode == BLKmode)
4633	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4634		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
4635		       : -1);
4636	  else
4637	    bitsize = GET_MODE_BITSIZE (mode);
4638
4639	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4640	    {
4641	      tree lo_index = TREE_OPERAND (index, 0);
4642	      tree hi_index = TREE_OPERAND (index, 1);
4643	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4644	      struct nesting *loop;
4645	      HOST_WIDE_INT lo, hi, count;
4646	      tree position;
4647
4648	      /* If the range is constant and "small", unroll the loop.  */
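	      /* Here "small" means the target is not in memory, the range
		 has at most two elements, or the total element size is a
		 known constant of at most 40 bytes (the 40 * 8 bit test
		 below).  */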
4649	      if (const_bounds_p
4650		  && host_integerp (lo_index, 0)
4651		  && host_integerp (hi_index, 0)
4652		  && (lo = tree_low_cst (lo_index, 0),
4653		      hi = tree_low_cst (hi_index, 0),
4654		      count = hi - lo + 1,
4655		      (GET_CODE (target) != MEM
4656		       || count <= 2
4657		       || (host_integerp (TYPE_SIZE (elttype), 1)
4658			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4659			       <= 40 * 8)))))
4660		{
4661		  lo -= minelt;  hi -= minelt;
4662		  for (; lo <= hi; lo++)
4663		    {
4664		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4665
4666		      if (GET_CODE (target) == MEM
4667			  && !MEM_KEEP_ALIAS_SET_P (target)
4668			  && TYPE_NONALIASED_COMPONENT (type))
4669			{
4670			  target = copy_rtx (target);
4671			  MEM_KEEP_ALIAS_SET_P (target) = 1;
4672			}
4673
4674		      store_constructor_field
4675			(target, bitsize, bitpos, mode, value, type, cleared,
4676			 get_alias_set (elttype));
4677		    }
4678		}
4679	      else
4680		{
4681		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4682		  loop_top = gen_label_rtx ();
4683		  loop_end = gen_label_rtx ();
4684
4685		  unsignedp = TREE_UNSIGNED (domain);
4686
4687		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4688
4689		  index_r
4690		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4691						 &unsignedp, 0));
4692		  SET_DECL_RTL (index, index_r);
4693		  if (TREE_CODE (value) == SAVE_EXPR
4694		      && SAVE_EXPR_RTL (value) == 0)
4695		    {
4696		      /* Make sure value gets expanded once before the
4697                         loop.  */
4698		      expand_expr (value, const0_rtx, VOIDmode, 0);
4699		      emit_queue ();
4700		    }
4701		  store_expr (lo_index, index_r, 0);
4702		  loop = expand_start_loop (0);
4703
4704		  /* Assign value to element index.  */
4705		  position
4706		    = convert (ssizetype,
4707			       fold (build (MINUS_EXPR, TREE_TYPE (index),
4708					    index, TYPE_MIN_VALUE (domain))));
4709		  position = size_binop (MULT_EXPR, position,
4710					 convert (ssizetype,
4711						  TYPE_SIZE_UNIT (elttype)));
4712
4713		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4714		  xtarget = offset_address (target, pos_rtx,
4715					    highest_pow2_factor (position));
4716		  xtarget = adjust_address (xtarget, mode, 0);
4717		  if (TREE_CODE (value) == CONSTRUCTOR)
4718		    store_constructor (value, xtarget, cleared,
4719				       bitsize / BITS_PER_UNIT);
4720		  else
4721		    store_expr (value, xtarget, 0);
4722
4723		  expand_exit_loop_if_false (loop,
4724					     build (LT_EXPR, integer_type_node,
4725						    index, hi_index));
4726
4727		  expand_increment (build (PREINCREMENT_EXPR,
4728					   TREE_TYPE (index),
4729					   index, integer_one_node), 0, 0);
4730		  expand_end_loop ();
4731		  emit_label (loop_end);
4732		}
4733	    }
4734	  else if ((index != 0 && ! host_integerp (index, 0))
4735		   || ! host_integerp (TYPE_SIZE (elttype), 1))
4736	    {
4737	      tree position;
4738
4739	      if (index == 0)
4740		index = ssize_int (i);
4741
4742	      if (minelt)
4743		index = convert (ssizetype,
4744				 fold (build (MINUS_EXPR, index,
4745					      TYPE_MIN_VALUE (domain))));
4746
4747	      position = size_binop (MULT_EXPR, index,
4748				     convert (ssizetype,
4749					      TYPE_SIZE_UNIT (elttype)));
4750	      xtarget = offset_address (target,
4751					expand_expr (position, 0, VOIDmode, 0),
4752					highest_pow2_factor (position));
4753	      xtarget = adjust_address (xtarget, mode, 0);
4754	      store_expr (value, xtarget, 0);
4755	    }
4756	  else
4757	    {
4758	      if (index != 0)
4759		bitpos = ((tree_low_cst (index, 0) - minelt)
4760			  * tree_low_cst (TYPE_SIZE (elttype), 1));
4761	      else
4762		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4763
4764	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4765		  && TYPE_NONALIASED_COMPONENT (type))
4766		{
4767		  target = copy_rtx (target);
4768		  MEM_KEEP_ALIAS_SET_P (target) = 1;
4769		}
4770
4771	      store_constructor_field (target, bitsize, bitpos, mode, value,
4772				       type, cleared, get_alias_set (elttype));
4773
4774	    }
4775	}
4776    }
4777
4778  /* Set constructor assignments.  */
4779  else if (TREE_CODE (type) == SET_TYPE)
4780    {
4781      tree elt = CONSTRUCTOR_ELTS (exp);
4782      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4783      tree domain = TYPE_DOMAIN (type);
4784      tree domain_min, domain_max, bitlength;
4785
4786      /* The default implementation strategy is to extract the constant
4787	 parts of the constructor, use that to initialize the target,
4788	 and then "or" in whatever non-constant ranges we need in addition.
4789
4790	 If a large set is all zero or all ones, it is
4791	 probably better to set it using memset (if available) or bzero.
4792	 Also, if a large set has just a single range, it may also be
4793	 better to first clear the set (using bzero/memset), and then
4794	 set the bits we want.  */
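	      /* For example, the compile-time-constant members of a small
		 set are emitted below as plain word stores, while the
		 remaining ranges (e.g. those with non-constant bounds) are
		 set by calling __setbits, or by memset when a constant
		 range covers whole bytes.  */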
4795
4796      /* Check for all zeros.  */
4797      if (elt == NULL_TREE && size > 0)
4798	{
4799	  if (!cleared)
4800	    clear_storage (target, GEN_INT (size));
4801	  return;
4802	}
4803
4804      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4805      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4806      bitlength = size_binop (PLUS_EXPR,
4807			      size_diffop (domain_max, domain_min),
4808			      ssize_int (1));
4809
4810      nbits = tree_low_cst (bitlength, 1);
4811
4812      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4813	 are "complicated" (more than one range), initialize (the
4814	 constant parts) by copying from a constant.  */
4815      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4816	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4817	{
4818	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4819	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4820	  char *bit_buffer = (char *) alloca (nbits);
4821	  HOST_WIDE_INT word = 0;
4822	  unsigned int bit_pos = 0;
4823	  unsigned int ibit = 0;
4824	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
4825
4826	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4827	  for (;;)
4828	    {
4829	      if (bit_buffer[ibit])
4830		{
4831		  if (BYTES_BIG_ENDIAN)
4832		    word |= (1 << (set_word_size - 1 - bit_pos));
4833		  else
4834		    word |= 1 << bit_pos;
4835		}
4836
4837	      bit_pos++;  ibit++;
4838	      if (bit_pos >= set_word_size || ibit == nbits)
4839		{
4840		  if (word != 0 || ! cleared)
4841		    {
4842		      rtx datum = GEN_INT (word);
4843		      rtx to_rtx;
4844
4845		      /* The assumption here is that it is safe to use
4846			 XEXP if the set is multi-word, but not if
4847			 it's single-word.  */
4848		      if (GET_CODE (target) == MEM)
4849			to_rtx = adjust_address (target, mode, offset);
4850		      else if (offset == 0)
4851			to_rtx = target;
4852		      else
4853			abort ();
4854		      emit_move_insn (to_rtx, datum);
4855		    }
4856
4857		  if (ibit == nbits)
4858		    break;
4859		  word = 0;
4860		  bit_pos = 0;
4861		  offset += set_word_size / BITS_PER_UNIT;
4862		}
4863	    }
4864	}
4865      else if (!cleared)
4866	/* Don't bother clearing storage if the set is all ones.  */
4867	if (TREE_CHAIN (elt) != NULL_TREE
4868	    || (TREE_PURPOSE (elt) == NULL_TREE
4869		? nbits != 1
4870		: ( ! host_integerp (TREE_VALUE (elt), 0)
4871		   || ! host_integerp (TREE_PURPOSE (elt), 0)
4872		   || (tree_low_cst (TREE_VALUE (elt), 0)
4873		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4874		       != (HOST_WIDE_INT) nbits))))
4875	  clear_storage (target, expr_size (exp));
4876
4877      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4878	{
4879	  /* Start of range of element or NULL.  */
4880	  tree startbit = TREE_PURPOSE (elt);
4881	  /* End of range of element, or element value.  */
4882	  tree endbit   = TREE_VALUE (elt);
4883#ifdef TARGET_MEM_FUNCTIONS
4884	  HOST_WIDE_INT startb, endb;
4885#endif
4886	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4887
4888	  bitlength_rtx = expand_expr (bitlength,
4889				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4890
4891	  /* Handle non-range tuple element like [ expr ].  */
4892	  if (startbit == NULL_TREE)
4893	    {
4894	      startbit = save_expr (endbit);
4895	      endbit = startbit;
4896	    }
4897
4898	  startbit = convert (sizetype, startbit);
4899	  endbit = convert (sizetype, endbit);
4900	  if (! integer_zerop (domain_min))
4901	    {
4902	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4903	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4904	    }
4905	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4906				      EXPAND_CONST_ADDRESS);
4907	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4908				    EXPAND_CONST_ADDRESS);
4909
4910	  if (REG_P (target))
4911	    {
4912	      targetx
4913		= assign_temp
4914		  ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4915					  TYPE_QUAL_CONST)),
4916		   0, 1, 1);
4917	      emit_move_insn (targetx, target);
4918	    }
4919
4920	  else if (GET_CODE (target) == MEM)
4921	    targetx = target;
4922	  else
4923	    abort ();
4924
4925#ifdef TARGET_MEM_FUNCTIONS
4926	  /* Optimization:  If startbit and endbit are
4927	     constants divisible by BITS_PER_UNIT,
4928	     call memset instead.  */
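	  /* E.g. a constant range covering bits 8..31 spans whole bytes,
	     so it becomes a memset of three all-ones bytes at offset 1
	     within the set.  */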
4929	  if (TREE_CODE (startbit) == INTEGER_CST
4930	      && TREE_CODE (endbit) == INTEGER_CST
4931	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4932	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4933	    {
4934	      emit_library_call (memset_libfunc, LCT_NORMAL,
4935				 VOIDmode, 3,
4936				 plus_constant (XEXP (targetx, 0),
4937						startb / BITS_PER_UNIT),
4938				 Pmode,
4939				 constm1_rtx, TYPE_MODE (integer_type_node),
4940				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4941				 TYPE_MODE (sizetype));
4942	    }
4943	  else
4944#endif
4945	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4946			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4947			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4948			       startbit_rtx, TYPE_MODE (sizetype),
4949			       endbit_rtx, TYPE_MODE (sizetype));
4950
4951	  if (REG_P (target))
4952	    emit_move_insn (target, targetx);
4953	}
4954    }
4955
4956  else
4957    abort ();
4958}
4959
4960/* Store the value of EXP (an expression tree)
4961   into a subfield of TARGET which has mode MODE and occupies
4962   BITSIZE bits, starting BITPOS bits from the start of TARGET.
4963   If MODE is VOIDmode, it means that we are storing into a bit-field.
4964
4965   If VALUE_MODE is VOIDmode, return nothing in particular.
4966   UNSIGNEDP is not used in this case.
4967
4968   Otherwise, return an rtx for the value stored.  This rtx
4969   has mode VALUE_MODE if that is convenient to do.
4970   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4971
4972   TYPE is the type of the underlying object,
4973
4974   ALIAS_SET is the alias set for the destination.  This value will
4975   (in general) be different from that for TARGET, since TARGET is a
4976   reference to the containing structure.  */
4977
4978static rtx
4979store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4980	     alias_set)
4981     rtx target;
4982     HOST_WIDE_INT bitsize;
4983     HOST_WIDE_INT bitpos;
4984     enum machine_mode mode;
4985     tree exp;
4986     enum machine_mode value_mode;
4987     int unsignedp;
4988     tree type;
4989     int alias_set;
4990{
4991  HOST_WIDE_INT width_mask = 0;
4992
4993  if (TREE_CODE (exp) == ERROR_MARK)
4994    return const0_rtx;
4995
4996  /* If we have nothing to store, do nothing unless the expression has
4997     side-effects.  */
4998  if (bitsize == 0)
4999    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5000	  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5001    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
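	  /* For instance, a 5-bit field gives WIDTH_MASK == 0x1f.  */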
5002
5003  /* If we are storing into an unaligned field of an aligned union that is
5004     in a register, we may have the mode of TARGET being an integer mode but
5005     MODE == BLKmode.  In that case, get an aligned object whose size and
5006     alignment are the same as TARGET and store TARGET into it (we can avoid
5007     the store if the field being stored is the entire width of TARGET).  Then
5008     call ourselves recursively to store the field into a BLKmode version of
5009     that object.  Finally, load from the object into TARGET.  This is not
5010     very efficient in general, but should only be slightly more expensive
5011     than the otherwise-required unaligned accesses.  Perhaps this can be
5012     cleaned up later.  */
5013
5014  if (mode == BLKmode
5015      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5016    {
5017      rtx object
5018	= assign_temp
5019	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5020	   0, 1, 1);
5021      rtx blk_object = adjust_address (object, BLKmode, 0);
5022
5023      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5024	emit_move_insn (object, target);
5025
5026      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5027		   alias_set);
5028
5029      emit_move_insn (target, object);
5030
5031      /* We want to return the BLKmode version of the data.  */
5032      return blk_object;
5033    }
5034
5035  if (GET_CODE (target) == CONCAT)
5036    {
5037      /* We're storing into a struct containing a single __complex.  */
5038
5039      if (bitpos != 0)
5040	abort ();
5041      return store_expr (exp, target, 0);
5042    }
5043
5044  /* If the structure is in a register or if the component
5045     is a bit field, we cannot use addressing to access it.
5046     Use bit-field techniques or SUBREG to store in it.  */
5047
5048  if (mode == VOIDmode
5049      || (mode != BLKmode && ! direct_store[(int) mode]
5050	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5051	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5052      || GET_CODE (target) == REG
5053      || GET_CODE (target) == SUBREG
5054      /* If the field isn't aligned enough to store as an ordinary memref,
5055	 store it as a bit field.  */
5056      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5057	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5058	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5059      /* If the RHS and field are a constant size and the size of the
5060	 RHS isn't the same size as the bitfield, we must use bitfield
5061	 operations.  */
5062      || (bitsize >= 0
5063	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5064	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5065    {
5066      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5067
5068      /* If BITSIZE is narrower than the size of the type of EXP
5069	 we will be narrowing TEMP.  Normally, what's wanted are the
5070	 low-order bits.  However, if EXP's type is a record and this is
5071	 a big-endian machine, we want the upper BITSIZE bits.  */
5072      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5073	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5074	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5075	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5076			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5077				       - bitsize),
5078			     temp, 1);
5079
5080      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5081	 MODE.  */
5082      if (mode != VOIDmode && mode != BLKmode
5083	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5084	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5085
5086      /* If the modes of TARGET and TEMP are both BLKmode, both
5087	 must be in memory and BITPOS must be aligned on a byte
5088	 boundary.  If so, we simply do a block copy.  */
5089      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5090	{
5091	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5092	      || bitpos % BITS_PER_UNIT != 0)
5093	    abort ();
5094
5095	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5096	  emit_block_move (target, temp,
5097			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5098				    / BITS_PER_UNIT));
5099
5100	  return value_mode == VOIDmode ? const0_rtx : target;
5101	}
5102
5103      /* Store the value in the bitfield.  */
5104      store_bit_field (target, bitsize, bitpos, mode, temp,
5105		       int_size_in_bytes (type));
5106
5107      if (value_mode != VOIDmode)
5108	{
5109	  /* The caller wants an rtx for the value.
5110	     If possible, avoid refetching from the bitfield itself.  */
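	  /* A worked example (illustrative only): for an 8-bit field whose
	     value was just computed into an SImode TEMP, the unsigned case
	     below is simply TEMP & 0xff, while the signed case shifts left
	     by 24 and then arithmetically right by 24 so that the field's
	     sign bit fills the upper bits of the word.  */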
5111	  if (width_mask != 0
5112	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5113	    {
5114	      tree count;
5115	      enum machine_mode tmode;
5116
5117	      if (unsignedp)
5118		return expand_and (temp,
5119				   GEN_INT
5120				   (trunc_int_for_mode
5121				    (width_mask,
5122				     GET_MODE (temp) == VOIDmode
5123				     ? value_mode
5124				     : GET_MODE (temp))), NULL_RTX);
5125
5126	      tmode = GET_MODE (temp);
5127	      if (tmode == VOIDmode)
5128		tmode = value_mode;
5129	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5130	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5131	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5132	    }
5133
5134	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5135				    NULL_RTX, value_mode, VOIDmode,
5136				    int_size_in_bytes (type));
5137	}
5138      return const0_rtx;
5139    }
5140  else
5141    {
5142      rtx addr = XEXP (target, 0);
5143      rtx to_rtx = target;
5144
5145      /* If a value is wanted, it must be the lhs;
5146	 so make the address stable for multiple use.  */
5147
5148      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5149	  && ! CONSTANT_ADDRESS_P (addr)
5150	  /* A frame-pointer reference is already stable.  */
5151	  && ! (GET_CODE (addr) == PLUS
5152		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5153		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5154		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5155	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5156
5157      /* Now build a reference to just the desired component.  */
5158
5159      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5160
5161      if (to_rtx == target)
5162	to_rtx = copy_rtx (to_rtx);
5163
5164      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5165      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5166	set_mem_alias_set (to_rtx, alias_set);
5167
5168      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5169    }
5170}
5171
5172/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5173   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5174   codes and find the ultimate containing object, which we return.
5175
5176   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5177   bit position, and *PUNSIGNEDP to the signedness of the field.
5178   If the position of the field is variable, we store a tree
5179   giving the variable offset (in units) in *POFFSET.
5180   This offset is in addition to the bit position.
5181   If the position is not variable, we store 0 in *POFFSET.
5182
5183   If any of the extraction expressions is volatile,
5184   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5185
5186   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5187   is a mode that can be used to access the field.  In that case, *PBITSIZE
5188   is redundant.
5189
5190   If the field describes a variable-sized object, *PMODE is set to
5191   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5192   this case, but the address of the object can be found.  */
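
/* Illustrative example (hypothetical declarations): for a reference
   such as `s.a[i].b', the loop below walks the COMPONENT_REF and
   ARRAY_REF nodes inward and returns the tree for `s' as the ultimate
   containing object.  When every piece of the position is constant,
   *POFFSET is 0 and *PBITPOS holds the whole bit position; with a
   variable index `i', *PBITPOS keeps only the constant bit-offset
   contributions and *POFFSET is a tree computing the remaining byte
   offset, including `i' times the element size.  */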
5193
5194tree
5195get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5196		     punsignedp, pvolatilep)
5197     tree exp;
5198     HOST_WIDE_INT *pbitsize;
5199     HOST_WIDE_INT *pbitpos;
5200     tree *poffset;
5201     enum machine_mode *pmode;
5202     int *punsignedp;
5203     int *pvolatilep;
5204{
5205  tree size_tree = 0;
5206  enum machine_mode mode = VOIDmode;
5207  tree offset = size_zero_node;
5208  tree bit_offset = bitsize_zero_node;
5209  tree placeholder_ptr = 0;
5210  tree tem;
5211
5212  /* First get the mode, signedness, and size.  We do this from just the
5213     outermost expression.  */
5214  if (TREE_CODE (exp) == COMPONENT_REF)
5215    {
5216      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5217      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5218	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5219
5220      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5221    }
5222  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5223    {
5224      size_tree = TREE_OPERAND (exp, 1);
5225      *punsignedp = TREE_UNSIGNED (exp);
5226    }
5227  else
5228    {
5229      mode = TYPE_MODE (TREE_TYPE (exp));
5230      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5231
5232      if (mode == BLKmode)
5233	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5234      else
5235	*pbitsize = GET_MODE_BITSIZE (mode);
5236    }
5237
5238  if (size_tree != 0)
5239    {
5240      if (! host_integerp (size_tree, 1))
5241	mode = BLKmode, *pbitsize = -1;
5242      else
5243	*pbitsize = tree_low_cst (size_tree, 1);
5244    }
5245
5246  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5247     and find the ultimate containing object.  */
5248  while (1)
5249    {
5250      if (TREE_CODE (exp) == BIT_FIELD_REF)
5251	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5252      else if (TREE_CODE (exp) == COMPONENT_REF)
5253	{
5254	  tree field = TREE_OPERAND (exp, 1);
5255	  tree this_offset = DECL_FIELD_OFFSET (field);
5256
5257	  /* If this field hasn't been filled in yet, don't go
5258	     past it.  This should only happen when folding expressions
5259	     made during type construction.  */
5260	  if (this_offset == 0)
5261	    break;
5262	  else if (! TREE_CONSTANT (this_offset)
5263		   && contains_placeholder_p (this_offset))
5264	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5265
5266	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5267	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5268				   DECL_FIELD_BIT_OFFSET (field));
5269
5270	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5271	}
5272
5273      else if (TREE_CODE (exp) == ARRAY_REF
5274	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5275	{
5276	  tree index = TREE_OPERAND (exp, 1);
5277	  tree array = TREE_OPERAND (exp, 0);
5278	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5279	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5280	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5281
5282	  /* We assume all arrays have sizes that are a multiple of a byte.
5283	     First subtract the lower bound, if any, in the type of the
5284	     index, then convert to sizetype and multiply by the size of the
5285	     array element.  */
5286	  if (low_bound != 0 && ! integer_zerop (low_bound))
5287	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5288				 index, low_bound));
5289
5290	  /* If the index has a self-referential type, pass it to a
5291	     WITH_RECORD_EXPR; if the component size is self-referential,
5292	     pass our component to one.  */
5293	  if (! TREE_CONSTANT (index)
5294	      && contains_placeholder_p (index))
5295	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5296	  if (! TREE_CONSTANT (unit_size)
5297	      && contains_placeholder_p (unit_size))
5298	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5299
5300	  offset = size_binop (PLUS_EXPR, offset,
5301			       size_binop (MULT_EXPR,
5302					   convert (sizetype, index),
5303					   unit_size));
5304	}
5305
5306      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5307	{
5308	  tree new = find_placeholder (exp, &placeholder_ptr);
5309
5310	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5311	     We might have been called from tree optimization where we
5312	     haven't set up an object yet.  */
5313	  if (new == 0)
5314	    break;
5315	  else
5316	    exp = new;
5317
5318	  continue;
5319	}
5320      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5321	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5322	       && ! ((TREE_CODE (exp) == NOP_EXPR
5323		      || TREE_CODE (exp) == CONVERT_EXPR)
5324		     && (TYPE_MODE (TREE_TYPE (exp))
5325			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5326	break;
5327
5328      /* If any reference in the chain is volatile, the effect is volatile.  */
5329      if (TREE_THIS_VOLATILE (exp))
5330	*pvolatilep = 1;
5331
5332      exp = TREE_OPERAND (exp, 0);
5333    }
5334
5335  /* If OFFSET is constant, see if we can return the whole thing as a
5336     constant bit position.  Otherwise, split it up.  */
5337  if (host_integerp (offset, 0)
5338      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5339				 bitsize_unit_node))
5340      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5341      && host_integerp (tem, 0))
5342    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5343  else
5344    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5345
5346  *pmode = mode;
5347  return exp;
5348}
5349
5350/* Return 1 if T is an expression that get_inner_reference handles.  */
5351
5352int
5353handled_component_p (t)
5354     tree t;
5355{
5356  switch (TREE_CODE (t))
5357    {
5358    case BIT_FIELD_REF:
5359    case COMPONENT_REF:
5360    case ARRAY_REF:
5361    case ARRAY_RANGE_REF:
5362    case NON_LVALUE_EXPR:
5363    case VIEW_CONVERT_EXPR:
5364      return 1;
5365
5366    case NOP_EXPR:
5367    case CONVERT_EXPR:
5368      return (TYPE_MODE (TREE_TYPE (t))
5369	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5370
5371    default:
5372      return 0;
5373    }
5374}
5375
5376/* Given an rtx VALUE that may contain additions and multiplications, return
5377   an equivalent value that just refers to a register, memory, or constant.
5378   This is done by generating instructions to perform the arithmetic and
5379   returning a pseudo-register containing the value.
5380
5381   The returned value may be a REG, SUBREG, MEM or constant.  */
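
/* Illustrative example (not a quote of actual RTL in this file):
   passed (plus:SI (reg:SI 100) (const_int 4)) and a null TARGET, this
   routine emits the addition via expand_binop and returns the pseudo
   holding the sum; passed something already "flat", such as a REG or a
   CONST_INT, it returns the value unchanged.  */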
5382
5383rtx
5384force_operand (value, target)
5385     rtx value, target;
5386{
5387  optab binoptab = 0;
5388  /* Use a temporary to force order of execution of calls to
5389     `force_operand'.  */
5390  rtx tmp;
5391  rtx op2;
5392  /* Use subtarget as the target for operand 0 of a binary operation.  */
5393  rtx subtarget = get_subtarget (target);
5394
5395  /* Check for a PIC address load.  */
5396  if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5397      && XEXP (value, 0) == pic_offset_table_rtx
5398      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5399	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5400	  || GET_CODE (XEXP (value, 1)) == CONST))
5401    {
5402      if (!subtarget)
5403	subtarget = gen_reg_rtx (GET_MODE (value));
5404      emit_move_insn (subtarget, value);
5405      return subtarget;
5406    }
5407
5408  if (GET_CODE (value) == PLUS)
5409    binoptab = add_optab;
5410  else if (GET_CODE (value) == MINUS)
5411    binoptab = sub_optab;
5412  else if (GET_CODE (value) == MULT)
5413    {
5414      op2 = XEXP (value, 1);
5415      if (!CONSTANT_P (op2)
5416	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5417	subtarget = 0;
5418      tmp = force_operand (XEXP (value, 0), subtarget);
5419      return expand_mult (GET_MODE (value), tmp,
5420			  force_operand (op2, NULL_RTX),
5421			  target, 1);
5422    }
5423
5424  if (binoptab)
5425    {
5426      op2 = XEXP (value, 1);
5427      if (!CONSTANT_P (op2)
5428	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5429	subtarget = 0;
5430      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5431	{
5432	  binoptab = add_optab;
5433	  op2 = negate_rtx (GET_MODE (value), op2);
5434	}
5435
5436      /* Check for an addition with OP2 a constant integer and our first
5437	 operand a PLUS of a virtual register and something else.  In that
5438	 case, we want to emit the sum of the virtual register and the
5439	 constant first and then add the other value.  This allows virtual
5440	 register instantiation to simply modify the constant rather than
5441	 creating another one around this addition.  */
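      /* For instance (illustrative RTL only): given
	   (plus (plus (virtual-stack-vars) (reg 117)) (const_int 8))
	 we form virtual-stack-vars + 8 first and only then add (reg 117),
	 so that instantiating the virtual register can fold the 8 into
	 its own offset instead of leaving a separate addition behind.  */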
5442      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5443	  && GET_CODE (XEXP (value, 0)) == PLUS
5444	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5445	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5446	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5447	{
5448	  rtx temp = expand_binop (GET_MODE (value), binoptab,
5449				   XEXP (XEXP (value, 0), 0), op2,
5450				   subtarget, 0, OPTAB_LIB_WIDEN);
5451	  return expand_binop (GET_MODE (value), binoptab, temp,
5452			       force_operand (XEXP (XEXP (value, 0), 1), 0),
5453			       target, 0, OPTAB_LIB_WIDEN);
5454	}
5455
5456      tmp = force_operand (XEXP (value, 0), subtarget);
5457      return expand_binop (GET_MODE (value), binoptab, tmp,
5458			   force_operand (op2, NULL_RTX),
5459			   target, 0, OPTAB_LIB_WIDEN);
5460      /* We give UNSIGNEDP = 0 to expand_binop
5461	 because the only operations we are expanding here are signed ones.  */
5462    }
5463
5464#ifdef INSN_SCHEDULING
5465	  /* On machines that have insn scheduling, we want all memory references to be
5466     explicit, so we need to deal with such paradoxical SUBREGs.  */
5467  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5468      && (GET_MODE_SIZE (GET_MODE (value))
5469	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5470    value
5471      = simplify_gen_subreg (GET_MODE (value),
5472			     force_reg (GET_MODE (SUBREG_REG (value)),
5473					force_operand (SUBREG_REG (value),
5474						       NULL_RTX)),
5475			     GET_MODE (SUBREG_REG (value)),
5476			     SUBREG_BYTE (value));
5477#endif
5478
5479  return value;
5480}
5481
5482/* Subroutine of expand_expr: return nonzero iff there is no way that
5483   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5484   call is going to be used to determine whether we need a temporary
5485   for EXP, as opposed to a recursive call to this function.
5486
5487   It is always safe for this routine to return zero since it merely
5488   searches for optimization opportunities.  */
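
/* Usage sketch (hypothetical names, for illustration): before reusing
   an existing TARGET_RTX while expanding EXP, a caller can test
   `safe_from_p (target_rtx, exp, 1)'.  A nonzero result means the reuse
   cannot clobber anything EXP still needs; a zero result merely forces
   the conservative choice of a fresh temporary, which is always
   correct.  */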
5489
5490int
5491safe_from_p (x, exp, top_p)
5492     rtx x;
5493     tree exp;
5494     int top_p;
5495{
5496  rtx exp_rtl = 0;
5497  int i, nops;
5498  static tree save_expr_list;
5499
5500  if (x == 0
5501      /* If EXP has varying size, we MUST use a target since we currently
5502	 have no way of allocating temporaries of variable size
5503	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5504	 So we assume here that something at a higher level has prevented a
5505	 clash.  This is somewhat bogus, but the best we can do.  Only
5506	 do this when X is BLKmode and when we are at the top level.  */
5507      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5508	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5509	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5510	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5511	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5512	      != INTEGER_CST)
5513	  && GET_MODE (x) == BLKmode)
5514      /* If X is in the outgoing argument area, it is always safe.  */
5515      || (GET_CODE (x) == MEM
5516	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5517	      || (GET_CODE (XEXP (x, 0)) == PLUS
5518		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5519    return 1;
5520
5521  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5522     find the underlying pseudo.  */
5523  if (GET_CODE (x) == SUBREG)
5524    {
5525      x = SUBREG_REG (x);
5526      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5527	return 0;
5528    }
5529
5530  /* A SAVE_EXPR might appear many times in the expression passed to the
5531     top-level safe_from_p call, and if it has a complex subexpression,
5532     examining it multiple times could result in a combinatorial explosion.
5533     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5534     with optimization took about 28 minutes to compile -- even though it was
5535     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
5536     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
5537     we have processed.  Note that the only test of top_p was above.  */
5538
5539  if (top_p)
5540    {
5541      int rtn;
5542      tree t;
5543
5544      save_expr_list = 0;
5545
5546      rtn = safe_from_p (x, exp, 0);
5547
5548      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5549	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5550
5551      return rtn;
5552    }
5553
5554  /* Now look at our tree code and possibly recurse.  */
5555  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5556    {
5557    case 'd':
5558      exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5559      break;
5560
5561    case 'c':
5562      return 1;
5563
5564    case 'x':
5565      if (TREE_CODE (exp) == TREE_LIST)
5566	return ((TREE_VALUE (exp) == 0
5567		 || safe_from_p (x, TREE_VALUE (exp), 0))
5568		&& (TREE_CHAIN (exp) == 0
5569		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5570      else if (TREE_CODE (exp) == ERROR_MARK)
5571	return 1;	/* An already-visited SAVE_EXPR? */
5572      else
5573	return 0;
5574
5575    case '1':
5576      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5577
5578    case '2':
5579    case '<':
5580      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5581	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5582
5583    case 'e':
5584    case 'r':
5585      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5586	 the expression.  If it is set, we conflict iff we are that rtx or
5587	 both are in memory.  Otherwise, we check all operands of the
5588	 expression recursively.  */
5589
5590      switch (TREE_CODE (exp))
5591	{
5592	case ADDR_EXPR:
5593	  /* If the operand is static or we are static, we can't conflict.
5594	     Likewise if we don't conflict with the operand at all.  */
5595	  if (staticp (TREE_OPERAND (exp, 0))
5596	      || TREE_STATIC (exp)
5597	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5598	    return 1;
5599
5600	  /* Otherwise, the only way this can conflict is if we are taking
5601	     the address of a DECL and that address is part of X, which is
5602	     very rare.  */
5603	  exp = TREE_OPERAND (exp, 0);
5604	  if (DECL_P (exp))
5605	    {
5606	      if (!DECL_RTL_SET_P (exp)
5607		  || GET_CODE (DECL_RTL (exp)) != MEM)
5608		return 0;
5609	      else
5610		exp_rtl = XEXP (DECL_RTL (exp), 0);
5611	    }
5612	  break;
5613
5614	case INDIRECT_REF:
5615	  if (GET_CODE (x) == MEM
5616	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5617					get_alias_set (exp)))
5618	    return 0;
5619	  break;
5620
5621	case CALL_EXPR:
5622	  /* Assume that the call will clobber all hard registers and
5623	     all of memory.  */
5624	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5625	      || GET_CODE (x) == MEM)
5626	    return 0;
5627	  break;
5628
5629	case RTL_EXPR:
5630	  /* If a sequence exists, we would have to scan every instruction
5631	     in the sequence to see if it was safe.  This is probably not
5632	     worthwhile.  */
5633	  if (RTL_EXPR_SEQUENCE (exp))
5634	    return 0;
5635
5636	  exp_rtl = RTL_EXPR_RTL (exp);
5637	  break;
5638
5639	case WITH_CLEANUP_EXPR:
5640	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5641	  break;
5642
5643	case CLEANUP_POINT_EXPR:
5644	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5645
5646	case SAVE_EXPR:
5647	  exp_rtl = SAVE_EXPR_RTL (exp);
5648	  if (exp_rtl)
5649	    break;
5650
5651	  /* If we've already scanned this, don't do it again.  Otherwise,
5652	     show we've scanned it and record it so the flag can be cleared
5653	     when we're done.  */
5654	  if (TREE_PRIVATE (exp))
5655	    return 1;
5656
5657	  TREE_PRIVATE (exp) = 1;
5658	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5659	    {
5660	      TREE_PRIVATE (exp) = 0;
5661	      return 0;
5662	    }
5663
5664	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5665	  return 1;
5666
5667	case BIND_EXPR:
5668	  /* The only operand we look at is operand 1.  The rest aren't
5669	     part of the expression.  */
5670	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5671
5672	case METHOD_CALL_EXPR:
5673	  /* This takes an rtx argument, but shouldn't appear here.  */
5674	  abort ();
5675
5676	default:
5677	  break;
5678	}
5679
5680      /* If we have an rtx, we do not need to scan our operands.  */
5681      if (exp_rtl)
5682	break;
5683
5684      nops = first_rtl_op (TREE_CODE (exp));
5685      for (i = 0; i < nops; i++)
5686	if (TREE_OPERAND (exp, i) != 0
5687	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5688	  return 0;
5689
5690      /* If this is a language-specific tree code, it may require
5691	 special handling.  */
5692      if ((unsigned int) TREE_CODE (exp)
5693	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5694	  && !(*lang_hooks.safe_from_p) (x, exp))
5695	return 0;
5696    }
5697
5698  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5699     with it.  */
5700  if (exp_rtl)
5701    {
5702      if (GET_CODE (exp_rtl) == SUBREG)
5703	{
5704	  exp_rtl = SUBREG_REG (exp_rtl);
5705	  if (GET_CODE (exp_rtl) == REG
5706	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5707	    return 0;
5708	}
5709
5710      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5711	 are memory and they conflict.  */
5712      return ! (rtx_equal_p (x, exp_rtl)
5713		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5714		    && true_dependence (exp_rtl, GET_MODE (x), x,
5715					rtx_addr_varies_p)));
5716    }
5717
5718  /* If we reach here, it is safe.  */
5719  return 1;
5720}
5721
5722/* Subroutine of expand_expr: return rtx if EXP is a
5723   variable or parameter; else return 0.  */
5724
5725static rtx
5726var_rtx (exp)
5727     tree exp;
5728{
5729  STRIP_NOPS (exp);
5730  switch (TREE_CODE (exp))
5731    {
5732    case PARM_DECL:
5733    case VAR_DECL:
5734      return DECL_RTL (exp);
5735    default:
5736      return 0;
5737    }
5738}
5739
5740#ifdef MAX_INTEGER_COMPUTATION_MODE
5741
5742void
5743check_max_integer_computation_mode (exp)
5744     tree exp;
5745{
5746  enum tree_code code;
5747  enum machine_mode mode;
5748
5749  /* Strip any NOPs that don't change the mode.  */
5750  STRIP_NOPS (exp);
5751  code = TREE_CODE (exp);
5752
5753  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5754  if (code == NOP_EXPR
5755      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5756    return;
5757
5758  /* First check the type of the overall operation.   We need only look at
5759     unary, binary and relational operations.  */
5760  if (TREE_CODE_CLASS (code) == '1'
5761      || TREE_CODE_CLASS (code) == '2'
5762      || TREE_CODE_CLASS (code) == '<')
5763    {
5764      mode = TYPE_MODE (TREE_TYPE (exp));
5765      if (GET_MODE_CLASS (mode) == MODE_INT
5766	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5767	internal_error ("unsupported wide integer operation");
5768    }
5769
5770  /* Check operand of a unary op.  */
5771  if (TREE_CODE_CLASS (code) == '1')
5772    {
5773      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5774      if (GET_MODE_CLASS (mode) == MODE_INT
5775	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5776	internal_error ("unsupported wide integer operation");
5777    }
5778
5779  /* Check operands of a binary/comparison op.  */
5780  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5781    {
5782      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5783      if (GET_MODE_CLASS (mode) == MODE_INT
5784	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5785	internal_error ("unsupported wide integer operation");
5786
5787      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5788      if (GET_MODE_CLASS (mode) == MODE_INT
5789	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5790	internal_error ("unsupported wide integer operation");
5791    }
5792}
5793#endif
5794
5795/* Return the highest power of two that EXP is known to be a multiple of.
5796   This is used in updating alignment of MEMs in array references.  */
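
/* Worked examples (illustrative): for the tree `i * 12' the MULT_EXPR
   case below multiplies the factors 1 (for the unknown `i') and 4
   (12 & -12), giving 4; for `n * 8 + 16' the PLUS_EXPR case takes
   MIN (8, 16) = 8.  The result is conservative: the expression's value
   is always a multiple of what is returned.  */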
5797
5798static HOST_WIDE_INT
5799highest_pow2_factor (exp)
5800     tree exp;
5801{
5802  HOST_WIDE_INT c0, c1;
5803
5804  switch (TREE_CODE (exp))
5805    {
5806    case INTEGER_CST:
5807	      /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5808	 lowest bit that's a one.  If the result is zero, return
5809	 BIGGEST_ALIGNMENT.  We need to handle this case since we can find it
5810	 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR.  If the constant overflows,
5811	 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5812	 later ICE.  */
5813      if (TREE_CONSTANT_OVERFLOW (exp)
5814	  || integer_zerop (exp))
5815	return BIGGEST_ALIGNMENT;
5816      else if (host_integerp (exp, 0))
5817	{
5818	  c0 = tree_low_cst (exp, 0);
5819	  c0 = c0 < 0 ? - c0 : c0;
5820	  return c0 & -c0;
5821	}
5822      break;
5823
5824    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
5825      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5826      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5827      return MIN (c0, c1);
5828
5829    case MULT_EXPR:
5830      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5831      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5832      return c0 * c1;
5833
5834    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
5835    case CEIL_DIV_EXPR:
5836      if (integer_pow2p (TREE_OPERAND (exp, 1))
5837	  && host_integerp (TREE_OPERAND (exp, 1), 1))
5838	{
5839	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5840	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5841	  return MAX (1, c0 / c1);
5842	}
5843      break;
5844
5845    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
5846    case SAVE_EXPR: case WITH_RECORD_EXPR:
5847      return highest_pow2_factor (TREE_OPERAND (exp, 0));
5848
5849    case COMPOUND_EXPR:
5850      return highest_pow2_factor (TREE_OPERAND (exp, 1));
5851
5852    case COND_EXPR:
5853      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5854      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5855      return MIN (c0, c1);
5856
5857    default:
5858      break;
5859    }
5860
5861  return 1;
5862}
5863
5864/* Return an object on the placeholder list that matches EXP, a
5865   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
5866   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
5867   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
5868	   is a location that initially holds the starting point in the
5869	   placeholder list (zero means the start of the list), and into which a
5870	   pointer to the list entry where the object was found is stored.  */
5871
5872tree
5873find_placeholder (exp, plist)
5874     tree exp;
5875     tree *plist;
5876{
5877  tree type = TREE_TYPE (exp);
5878  tree placeholder_expr;
5879
5880  for (placeholder_expr
5881       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5882       placeholder_expr != 0;
5883       placeholder_expr = TREE_CHAIN (placeholder_expr))
5884    {
5885      tree need_type = TYPE_MAIN_VARIANT (type);
5886      tree elt;
5887
5888      /* Find the outermost reference that is of the type we want.  If none,
5889	 see if any object has a type that is a pointer to the type we
5890	 want.  */
5891      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5892	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5893		   || TREE_CODE (elt) == COND_EXPR)
5894		  ? TREE_OPERAND (elt, 1)
5895		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5896		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5897		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5898		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5899		  ? TREE_OPERAND (elt, 0) : 0))
5900	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5901	  {
5902	    if (plist)
5903	      *plist = placeholder_expr;
5904	    return elt;
5905	  }
5906
5907      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5908	   elt
5909	   = ((TREE_CODE (elt) == COMPOUND_EXPR
5910	       || TREE_CODE (elt) == COND_EXPR)
5911	      ? TREE_OPERAND (elt, 1)
5912	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5913		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5914		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5915		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5916	      ? TREE_OPERAND (elt, 0) : 0))
5917	if (POINTER_TYPE_P (TREE_TYPE (elt))
5918	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5919		== need_type))
5920	  {
5921	    if (plist)
5922	      *plist = placeholder_expr;
5923	    return build1 (INDIRECT_REF, need_type, elt);
5924	  }
5925    }
5926
5927  return 0;
5928}
5929
5930/* expand_expr: generate code for computing expression EXP.
5931   An rtx for the computed value is returned.  The value is never null.
5932   In the case of a void EXP, const0_rtx is returned.
5933
5934   The value may be stored in TARGET if TARGET is nonzero.
5935   TARGET is just a suggestion; callers must assume that
5936   the rtx returned may not be the same as TARGET.
5937
5938   If TARGET is CONST0_RTX, it means that the value will be ignored.
5939
5940   If TMODE is not VOIDmode, it suggests generating the
5941   result in mode TMODE.  But this is done only when convenient.
5942	   Otherwise, TMODE is ignored and the value is generated in its natural mode.
5943   TMODE is just a suggestion; callers must assume that
5944   the rtx returned may not have mode TMODE.
5945
5946   Note that TARGET may have neither TMODE nor MODE.  In that case, it
5947   probably will not be used.
5948
5949   If MODIFIER is EXPAND_SUM then when EXP is an addition
5950   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5951   or a nest of (PLUS ...) and (MINUS ...) where the terms are
5952   products as above, or REG or MEM, or constant.
5953   Ordinarily in such cases we would output mul or add instructions
5954   and then return a pseudo reg containing the sum.
5955
5956   EXPAND_INITIALIZER is much like EXPAND_SUM except that
5957   it also marks a label as absolutely required (it can't be dead).
5958   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5959   This is used for outputting expressions used in initializers.
5960
5961   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5962   with a constant address even if that address is not normally legitimate.
5963   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
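
/* A typical call (illustrative of the interface above, not a specific
   call site) is

     op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   i.e. no preferred target, no preferred mode, and modifier 0
   (EXPAND_NORMAL), leaving this routine free to decide where and in
   what mode the value ends up.  */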
5964
5965rtx
5966expand_expr (exp, target, tmode, modifier)
5967     tree exp;
5968     rtx target;
5969     enum machine_mode tmode;
5970     enum expand_modifier modifier;
5971{
5972  rtx op0, op1, temp;
5973  tree type = TREE_TYPE (exp);
5974  int unsignedp = TREE_UNSIGNED (type);
5975  enum machine_mode mode;
5976  enum tree_code code = TREE_CODE (exp);
5977  optab this_optab;
5978  rtx subtarget, original_target;
5979  int ignore;
5980  tree context;
5981
5982  /* Handle ERROR_MARK before anybody tries to access its type.  */
5983  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5984    {
5985      op0 = CONST0_RTX (tmode);
5986      if (op0 != 0)
5987	return op0;
5988      return const0_rtx;
5989    }
5990
5991  mode = TYPE_MODE (type);
5992  /* Use subtarget as the target for operand 0 of a binary operation.  */
5993  subtarget = get_subtarget (target);
5994  original_target = target;
5995  ignore = (target == const0_rtx
5996	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5997		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5998		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
5999		&& TREE_CODE (type) == VOID_TYPE));
6000
6001  /* If we are going to ignore this result, we need only do something
6002     if there is a side-effect somewhere in the expression.  If there
6003     is, short-circuit the most common cases here.  Note that we must
6004     not call expand_expr with anything but const0_rtx in case this
6005     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
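  /* For example (roughly): expanding `(void) (f () + x)' eventually
     reaches the binary-operator branch below with IGNORE set; both
     operands are expanded with const0_rtx as the target purely for
     their side effects, and const0_rtx itself is returned since the
     sum is never needed.  */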
6006
6007  if (ignore)
6008    {
6009      if (! TREE_SIDE_EFFECTS (exp))
6010	return const0_rtx;
6011
6012	      /* Ensure we reference a volatile object even if the value is ignored, but
6013	 don't do this if all we are doing is taking its address.  */
6014      if (TREE_THIS_VOLATILE (exp)
6015	  && TREE_CODE (exp) != FUNCTION_DECL
6016	  && mode != VOIDmode && mode != BLKmode
6017	  && modifier != EXPAND_CONST_ADDRESS)
6018	{
6019	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6020	  if (GET_CODE (temp) == MEM)
6021	    temp = copy_to_reg (temp);
6022	  return const0_rtx;
6023	}
6024
6025      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6026	  || code == INDIRECT_REF || code == BUFFER_REF)
6027	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6028			    modifier);
6029
6030      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6031	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6032	{
6033	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6034	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6035	  return const0_rtx;
6036	}
6037      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6038	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6039	/* If the second operand has no side effects, just evaluate
6040	   the first.  */
6041	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6042			    modifier);
6043      else if (code == BIT_FIELD_REF)
6044	{
6045	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6046	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6047	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6048	  return const0_rtx;
6049	}
6050
6051      target = 0;
6052    }
6053
6054#ifdef MAX_INTEGER_COMPUTATION_MODE
6055  /* Only check stuff here if the mode we want is different from the mode
6056	     of the expression; if it's the same, check_max_integer_computation_mode
6057     will handle it.  Do we really need to check this stuff at all?  */
6058
6059  if (target
6060      && GET_MODE (target) != mode
6061      && TREE_CODE (exp) != INTEGER_CST
6062      && TREE_CODE (exp) != PARM_DECL
6063      && TREE_CODE (exp) != ARRAY_REF
6064      && TREE_CODE (exp) != ARRAY_RANGE_REF
6065      && TREE_CODE (exp) != COMPONENT_REF
6066      && TREE_CODE (exp) != BIT_FIELD_REF
6067      && TREE_CODE (exp) != INDIRECT_REF
6068      && TREE_CODE (exp) != CALL_EXPR
6069      && TREE_CODE (exp) != VAR_DECL
6070      && TREE_CODE (exp) != RTL_EXPR)
6071    {
6072      enum machine_mode mode = GET_MODE (target);
6073
6074      if (GET_MODE_CLASS (mode) == MODE_INT
6075	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6076	internal_error ("unsupported wide integer operation");
6077    }
6078
6079  if (tmode != mode
6080      && TREE_CODE (exp) != INTEGER_CST
6081      && TREE_CODE (exp) != PARM_DECL
6082      && TREE_CODE (exp) != ARRAY_REF
6083      && TREE_CODE (exp) != ARRAY_RANGE_REF
6084      && TREE_CODE (exp) != COMPONENT_REF
6085      && TREE_CODE (exp) != BIT_FIELD_REF
6086      && TREE_CODE (exp) != INDIRECT_REF
6087      && TREE_CODE (exp) != VAR_DECL
6088      && TREE_CODE (exp) != CALL_EXPR
6089      && TREE_CODE (exp) != RTL_EXPR
6090      && GET_MODE_CLASS (tmode) == MODE_INT
6091      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6092    internal_error ("unsupported wide integer operation");
6093
6094  check_max_integer_computation_mode (exp);
6095#endif
6096
6097	  /* If we will do cse, generate all results into pseudo registers
6098     since 1) that allows cse to find more things
6099     and 2) otherwise cse could produce an insn the machine
6100	     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6101     MEM: that's much more likely to be most efficient into the MEM.  */
6102
6103  if (! cse_not_expected && mode != BLKmode && target
6104      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6105      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6106    target = subtarget;
6107
6108  switch (code)
6109    {
6110    case LABEL_DECL:
6111      {
6112	tree function = decl_function_context (exp);
6113	/* Handle using a label in a containing function.  */
6114	if (function != current_function_decl
6115	    && function != inline_function_decl && function != 0)
6116	  {
6117	    struct function *p = find_function_data (function);
6118	    p->expr->x_forced_labels
6119	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6120				   p->expr->x_forced_labels);
6121	  }
6122	else
6123	  {
6124	    if (modifier == EXPAND_INITIALIZER)
6125	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6126						 label_rtx (exp),
6127						 forced_labels);
6128	  }
6129
6130	temp = gen_rtx_MEM (FUNCTION_MODE,
6131			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6132	if (function != current_function_decl
6133	    && function != inline_function_decl && function != 0)
6134	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6135	return temp;
6136      }
6137
6138    case PARM_DECL:
6139      if (DECL_RTL (exp) == 0)
6140	{
6141	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6142	  return CONST0_RTX (mode);
6143	}
6144
6145      /* ... fall through ...  */
6146
6147    case VAR_DECL:
6148      /* If a static var's type was incomplete when the decl was written,
6149	 but the type is complete now, lay out the decl now.  */
6150      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6151	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6152	{
6153	  rtx value = DECL_RTL_IF_SET (exp);
6154
6155	  layout_decl (exp, 0);
6156
6157	  /* If the RTL was already set, update its mode and memory
6158	     attributes.  */
6159	  if (value != 0)
6160	    {
6161	      PUT_MODE (value, DECL_MODE (exp));
6162	      SET_DECL_RTL (exp, 0);
6163	      set_mem_attributes (value, exp, 1);
6164	      SET_DECL_RTL (exp, value);
6165	    }
6166	}
6167
6168      /* ... fall through ...  */
6169
6170    case FUNCTION_DECL:
6171    case RESULT_DECL:
6172      if (DECL_RTL (exp) == 0)
6173	abort ();
6174
6175	      /* Ensure the variable is marked as used even if it doesn't go
6176	 through a parser.  If it hasn't been used yet, write out an external
6177	 definition.  */
6178      if (! TREE_USED (exp))
6179	{
6180	  assemble_external (exp);
6181	  TREE_USED (exp) = 1;
6182	}
6183
6184      /* Show we haven't gotten RTL for this yet.  */
6185      temp = 0;
6186
6187      /* Handle variables inherited from containing functions.  */
6188      context = decl_function_context (exp);
6189
6190      /* We treat inline_function_decl as an alias for the current function
6191	 because that is the inline function whose vars, types, etc.
6192	 are being merged into the current function.
6193	 See expand_inline_function.  */
6194
6195      if (context != 0 && context != current_function_decl
6196	  && context != inline_function_decl
6197	  /* If var is static, we don't need a static chain to access it.  */
6198	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6199		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6200	{
6201	  rtx addr;
6202
6203	  /* Mark as non-local and addressable.  */
6204	  DECL_NONLOCAL (exp) = 1;
6205	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6206	    abort ();
6207	  mark_addressable (exp);
6208	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6209	    abort ();
6210	  addr = XEXP (DECL_RTL (exp), 0);
6211	  if (GET_CODE (addr) == MEM)
6212	    addr
6213	      = replace_equiv_address (addr,
6214				       fix_lexical_addr (XEXP (addr, 0), exp));
6215	  else
6216	    addr = fix_lexical_addr (addr, exp);
6217
6218	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6219	}
6220
6221      /* This is the case of an array whose size is to be determined
6222	 from its initializer, while the initializer is still being parsed.
6223	 See expand_decl.  */
6224
6225      else if (GET_CODE (DECL_RTL (exp)) == MEM
6226	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6227	temp = validize_mem (DECL_RTL (exp));
6228
6229	      /* If DECL_RTL is memory, we are in the normal case; if either
6230	 the address is not valid, or it is not a register and -fforce-addr
6231	 is specified, get the address into a register.  */
6232
6233      else if (GET_CODE (DECL_RTL (exp)) == MEM
6234	       && modifier != EXPAND_CONST_ADDRESS
6235	       && modifier != EXPAND_SUM
6236	       && modifier != EXPAND_INITIALIZER
6237	       && (! memory_address_p (DECL_MODE (exp),
6238				       XEXP (DECL_RTL (exp), 0))
6239		   || (flag_force_addr
6240		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6241	temp = replace_equiv_address (DECL_RTL (exp),
6242				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6243
6244      /* If we got something, return it.  But first, set the alignment
6245	 if the address is a register.  */
6246      if (temp != 0)
6247	{
6248	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6249	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6250
6251	  return temp;
6252	}
6253
6254      /* If the mode of DECL_RTL does not match that of the decl, it
6255	 must be a promoted value.  We return a SUBREG of the wanted mode,
6256	 but mark it so that we know that it was already extended.  */
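      /* Concretely (an illustrative case): on a target that promotes a
	 QImode `char' variable into an SImode register, DECL_RTL is that
	 SImode REG while DECL_MODE is QImode; we hand back a QImode
	 SUBREG of the register with SUBREG_PROMOTED_VAR_P set so later
	 code knows the upper bits already hold a valid extension.  */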
6257
6258      if (GET_CODE (DECL_RTL (exp)) == REG
6259	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6260	{
6261	  /* Get the signedness used for this variable.  Ensure we get the
6262	     same mode we got when the variable was declared.  */
6263	  if (GET_MODE (DECL_RTL (exp))
6264	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6265	    abort ();
6266
6267	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6268	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6269	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6270	  return temp;
6271	}
6272
6273      return DECL_RTL (exp);
6274
6275    case INTEGER_CST:
6276      return immed_double_const (TREE_INT_CST_LOW (exp),
6277				 TREE_INT_CST_HIGH (exp), mode);
6278
6279    case CONST_DECL:
6280      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6281
6282    case REAL_CST:
6283      /* If optimized, generate immediate CONST_DOUBLE
6284	 which will be turned into memory by reload if necessary.
6285
6286	 We used to force a register so that loop.c could see it.  But
6287	 this does not allow gen_* patterns to perform optimizations with
6288	 the constants.  It also produces two insns in cases like "x = 1.0;".
6289	 On most machines, floating-point constants are not permitted in
6290	 many insns, so we'd end up copying it to a register in any case.
6291
6292	 Now, we do the copying in expand_binop, if appropriate.  */
6293      return immed_real_const (exp);
6294
6295    case COMPLEX_CST:
6296    case STRING_CST:
6297      if (! TREE_CST_RTL (exp))
6298	output_constant_def (exp, 1);
6299
6300      /* TREE_CST_RTL probably contains a constant address.
6301	 On RISC machines where a constant address isn't valid,
6302	 make some insns to get that address into a register.  */
6303      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6304	  && modifier != EXPAND_CONST_ADDRESS
6305	  && modifier != EXPAND_INITIALIZER
6306	  && modifier != EXPAND_SUM
6307	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6308	      || (flag_force_addr
6309		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6310	return replace_equiv_address (TREE_CST_RTL (exp),
6311				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6312      return TREE_CST_RTL (exp);
6313
6314    case EXPR_WITH_FILE_LOCATION:
6315      {
6316	rtx to_return;
6317	const char *saved_input_filename = input_filename;
6318	int saved_lineno = lineno;
6319	input_filename = EXPR_WFL_FILENAME (exp);
6320	lineno = EXPR_WFL_LINENO (exp);
6321	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6322	  emit_line_note (input_filename, lineno);
6323	/* Possibly avoid switching back and forth here.  */
6324	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6325	input_filename = saved_input_filename;
6326	lineno = saved_lineno;
6327	return to_return;
6328      }
6329
6330    case SAVE_EXPR:
6331      context = decl_function_context (exp);
6332
6333      /* If this SAVE_EXPR was at global context, assume we are an
6334	 initialization function and move it into our context.  */
6335      if (context == 0)
6336	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6337
6338      /* We treat inline_function_decl as an alias for the current function
6339	 because that is the inline function whose vars, types, etc.
6340	 are being merged into the current function.
6341	 See expand_inline_function.  */
6342      if (context == current_function_decl || context == inline_function_decl)
6343	context = 0;
6344
6345      /* If this is non-local, handle it.  */
6346      if (context)
6347	{
6348	  /* The following call just exists to abort if the context is
6349	     not of a containing function.  */
6350	  find_function_data (context);
6351
6352	  temp = SAVE_EXPR_RTL (exp);
6353	  if (temp && GET_CODE (temp) == REG)
6354	    {
6355	      put_var_into_stack (exp);
6356	      temp = SAVE_EXPR_RTL (exp);
6357	    }
6358	  if (temp == 0 || GET_CODE (temp) != MEM)
6359	    abort ();
6360	  return
6361	    replace_equiv_address (temp,
6362				   fix_lexical_addr (XEXP (temp, 0), exp));
6363	}
6364      if (SAVE_EXPR_RTL (exp) == 0)
6365	{
6366	  if (mode == VOIDmode)
6367	    temp = const0_rtx;
6368	  else
6369	    temp = assign_temp (build_qualified_type (type,
6370						      (TYPE_QUALS (type)
6371						       | TYPE_QUAL_CONST)),
6372				3, 0, 0);
6373
6374	  SAVE_EXPR_RTL (exp) = temp;
6375	  if (!optimize && GET_CODE (temp) == REG)
6376	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6377						save_expr_regs);
6378
6379	  /* If the mode of TEMP does not match that of the expression, it
6380	     must be a promoted value.  We pass store_expr a SUBREG of the
6381	     wanted mode but mark it so that we know that it was already
6382	     extended.  Note that `unsignedp' was modified above in
6383	     this case.  */
6384
6385	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6386	    {
6387	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6388	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6389	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6390	    }
6391
6392	  if (temp == const0_rtx)
6393	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6394	  else
6395	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6396
6397	  TREE_USED (exp) = 1;
6398	}
6399
6400      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6401	 must be a promoted value.  We return a SUBREG of the wanted mode,
6402	 but mark it so that we know that it was already extended.  */
6403
6404      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6405	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6406	{
6407	  /* Compute the signedness and make the proper SUBREG.  */
6408	  promote_mode (type, mode, &unsignedp, 0);
6409	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6410	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6411	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6412	  return temp;
6413	}
6414
6415      return SAVE_EXPR_RTL (exp);
6416
6417    case UNSAVE_EXPR:
6418      {
6419	rtx temp;
6420	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6421	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6422	return temp;
6423      }
6424
6425    case PLACEHOLDER_EXPR:
6426      {
6427	tree old_list = placeholder_list;
6428	tree placeholder_expr = 0;
6429
6430	exp = find_placeholder (exp, &placeholder_expr);
6431	if (exp == 0)
6432	  abort ();
6433
6434	placeholder_list = TREE_CHAIN (placeholder_expr);
6435	temp = expand_expr (exp, original_target, tmode, modifier);
6436	placeholder_list = old_list;
6437	return temp;
6438      }
6439
6440      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6441      abort ();
6442
6443    case WITH_RECORD_EXPR:
6444      /* Put the object on the placeholder list, expand our first operand,
6445	 and pop the list.  */
6446      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6447				    placeholder_list);
6448      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6449			    modifier);
6450      placeholder_list = TREE_CHAIN (placeholder_list);
6451      return target;
6452
6453    case GOTO_EXPR:
6454      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6455	expand_goto (TREE_OPERAND (exp, 0));
6456      else
6457	expand_computed_goto (TREE_OPERAND (exp, 0));
6458      return const0_rtx;
6459
6460    case EXIT_EXPR:
6461      expand_exit_loop_if_false (NULL,
6462				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6463      return const0_rtx;
6464
6465    case LABELED_BLOCK_EXPR:
6466      if (LABELED_BLOCK_BODY (exp))
6467	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6468      /* Should perhaps use expand_label, but this is simpler and safer.  */
6469      do_pending_stack_adjust ();
6470      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6471      return const0_rtx;
6472
6473    case EXIT_BLOCK_EXPR:
6474      if (EXIT_BLOCK_RETURN (exp))
6475	sorry ("returned value in block_exit_expr");
6476      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6477      return const0_rtx;
6478
6479    case LOOP_EXPR:
6480      push_temp_slots ();
6481      expand_start_loop (1);
6482      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6483      expand_end_loop ();
6484      pop_temp_slots ();
6485
6486      return const0_rtx;
6487
6488    case BIND_EXPR:
6489      {
6490	tree vars = TREE_OPERAND (exp, 0);
6491	int vars_need_expansion = 0;
6492
6493	/* Need to open a binding contour here because
6494	   if there are any cleanups they must be contained here.  */
6495	expand_start_bindings (2);
6496
6497	/* Mark the corresponding BLOCK for output in its proper place.  */
6498	if (TREE_OPERAND (exp, 2) != 0
6499	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6500	  insert_block (TREE_OPERAND (exp, 2));
6501
6502	/* If VARS have not yet been expanded, expand them now.  */
6503	while (vars)
6504	  {
6505	    if (!DECL_RTL_SET_P (vars))
6506	      {
6507		vars_need_expansion = 1;
6508		expand_decl (vars);
6509	      }
6510	    expand_decl_init (vars);
6511	    vars = TREE_CHAIN (vars);
6512	  }
6513
6514	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6515
6516	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6517
6518	return temp;
6519      }
6520
6521    case RTL_EXPR:
6522      if (RTL_EXPR_SEQUENCE (exp))
6523	{
6524	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6525	    abort ();
6526	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6527	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6528	}
6529      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6530      free_temps_for_rtl_expr (exp);
6531      return RTL_EXPR_RTL (exp);
6532
6533    case CONSTRUCTOR:
6534      /* If we don't need the result, just ensure we evaluate any
6535	 subexpressions.  */
6536      if (ignore)
6537	{
6538	  tree elt;
6539
6540	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6541	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6542
6543	  return const0_rtx;
6544	}
6545
6546      /* All elts simple constants => refer to a constant in memory.  But
6547	 if this is a non-BLKmode mode, let it store a field at a time
6548	 since that should make a CONST_INT or CONST_DOUBLE when we
6549	 fold.  Likewise, if we have a target we can use, it is best to
6550	 store directly into the target unless the type is large enough
6551	 that memcpy will be used.  If we are making an initializer and
6552	 all operands are constant, put it in memory as well.  */
6553      else if ((TREE_STATIC (exp)
6554		&& ((mode == BLKmode
6555		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6556		    || TREE_ADDRESSABLE (exp)
6557		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6558			&& (! MOVE_BY_PIECES_P
6559			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6560			     TYPE_ALIGN (type)))
6561			&& ! mostly_zeros_p (exp))))
6562	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6563	{
6564	  rtx constructor = output_constant_def (exp, 1);
6565
6566	  if (modifier != EXPAND_CONST_ADDRESS
6567	      && modifier != EXPAND_INITIALIZER
6568	      && modifier != EXPAND_SUM)
6569	    constructor = validize_mem (constructor);
6570
6571	  return constructor;
6572	}
6573      else
6574	{
6575	  /* Handle calls that pass values in multiple non-contiguous
6576	     locations.  The Irix 6 ABI has examples of this.  */
6577	  if (target == 0 || ! safe_from_p (target, exp, 1)
6578	      || GET_CODE (target) == PARALLEL)
6579	    target
6580	      = assign_temp (build_qualified_type (type,
6581						   (TYPE_QUALS (type)
6582						    | (TREE_READONLY (exp)
6583						       * TYPE_QUAL_CONST))),
6584			     0, TREE_ADDRESSABLE (exp), 1);
6585
6586	  store_constructor (exp, target, 0,
6587			     int_size_in_bytes (TREE_TYPE (exp)));
6588	  return target;
6589	}
6590
6591    case INDIRECT_REF:
6592      {
6593	tree exp1 = TREE_OPERAND (exp, 0);
6594	tree index;
6595	tree string = string_constant (exp1, &index);
6596
6597	/* Try to optimize reads from const strings.  */
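	/* E.g. a read such as *("abc" + 1), where the result is a one-byte
	   integer mode, can simply yield the constant 'b' with no memory
	   reference at all.  */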
6598 	if (string
6599 	    && TREE_CODE (string) == STRING_CST
6600 	    && TREE_CODE (index) == INTEGER_CST
6601	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6602 	    && GET_MODE_CLASS (mode) == MODE_INT
6603 	    && GET_MODE_SIZE (mode) == 1
6604	    && modifier != EXPAND_WRITE)
6605 	  return
6606	    GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6607
6608	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6609	op0 = memory_address (mode, op0);
6610	temp = gen_rtx_MEM (mode, op0);
6611	set_mem_attributes (temp, exp, 0);
6612
6613	/* If we are writing to this object and its type is a record with
6614	   readonly fields, we must mark it as readonly so it will
6615	   conflict with readonly references to those fields.  */
6616	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6617	  RTX_UNCHANGING_P (temp) = 1;
6618
6619	return temp;
6620      }
6621
6622    case ARRAY_REF:
6623      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6624	abort ();
6625
6626      {
6627	tree array = TREE_OPERAND (exp, 0);
6628	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6629	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6630	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6631	HOST_WIDE_INT i;
6632
6633	/* Optimize the special case of a zero lower bound.
6634
6635	   We convert the low_bound to sizetype to avoid some problems
6636	   with constant folding.  (E.g. suppose the lower bound is 1,
6637	   and its mode is QI.  Without the conversion,  (ARRAY
6638	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6639	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6640
6641	if (! integer_zerop (low_bound))
6642	  index = size_diffop (index, convert (sizetype, low_bound));
6643
6644	/* Fold an expression like: "foo"[2].
6645	   This is not done in fold so it won't happen inside &.
6646	   Don't fold if this is for wide characters since it's too
6647	   difficult to do correctly and this is a very rare case.  */
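	/* For instance, "foo"[2] would simply become the integer constant 'o'
	   here, with no memory reference.  */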
6648
6649	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6650	    && TREE_CODE (array) == STRING_CST
6651	    && TREE_CODE (index) == INTEGER_CST
6652	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6653	    && GET_MODE_CLASS (mode) == MODE_INT
6654	    && GET_MODE_SIZE (mode) == 1)
6655	  return
6656	    GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6657
6658	/* If this is a constant index into a constant array,
6659	   just get the value from the array.  Handle both the cases when
6660	   we have an explicit constructor and when our operand is a variable
6661	   that was declared const.  */
6662
6663	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6664	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6665	    && TREE_CODE (index) == INTEGER_CST
6666	    && 0 > compare_tree_int (index,
6667				     list_length (CONSTRUCTOR_ELTS
6668						  (TREE_OPERAND (exp, 0)))))
6669	  {
6670	    tree elem;
6671
6672	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6673		 i = TREE_INT_CST_LOW (index);
6674		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6675	      ;
6676
6677	    if (elem)
6678	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6679				  modifier);
6680	  }
6681
6682	else if (optimize >= 1
6683		 && modifier != EXPAND_CONST_ADDRESS
6684		 && modifier != EXPAND_INITIALIZER
6685		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6686		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6687		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6688	  {
6689	    if (TREE_CODE (index) == INTEGER_CST)
6690	      {
6691		tree init = DECL_INITIAL (array);
6692
6693		if (TREE_CODE (init) == CONSTRUCTOR)
6694		  {
6695		    tree elem;
6696
6697		    for (elem = CONSTRUCTOR_ELTS (init);
6698			 (elem
6699			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6700			 elem = TREE_CHAIN (elem))
6701		      ;
6702
6703		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6704		      return expand_expr (fold (TREE_VALUE (elem)), target,
6705					  tmode, modifier);
6706		  }
6707		else if (TREE_CODE (init) == STRING_CST
6708			 && 0 > compare_tree_int (index,
6709						  TREE_STRING_LENGTH (init)))
6710		  {
6711		    tree type = TREE_TYPE (TREE_TYPE (init));
6712		    enum machine_mode mode = TYPE_MODE (type);
6713
6714		    if (GET_MODE_CLASS (mode) == MODE_INT
6715			&& GET_MODE_SIZE (mode) == 1)
6716		      return (GEN_INT
6717			      (TREE_STRING_POINTER
6718			       (init)[TREE_INT_CST_LOW (index)]));
6719		  }
6720	      }
6721	  }
6722      }
6723      /* Fall through.  */
6724
6725    case COMPONENT_REF:
6726    case BIT_FIELD_REF:
6727    case ARRAY_RANGE_REF:
6728      /* If the operand is a CONSTRUCTOR, we can just extract the
6729	 appropriate field if it is present.  Don't do this if we have
6730	 already written the data since we want to refer to that copy
6731	 and varasm.c assumes that's what we'll do.  */
6732      if (code == COMPONENT_REF
6733	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6734	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6735	{
6736	  tree elt;
6737
6738	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6739	       elt = TREE_CHAIN (elt))
6740	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6741		/* We can normally use the value of the field in the
6742		   CONSTRUCTOR.  However, if this is a bitfield in
6743		   an integral mode that we can fit in a HOST_WIDE_INT,
6744		   we must mask only the number of bits in the bitfield,
6745		   since this is done implicitly by the constructor.  If
6746		   the bitfield does not meet either of those conditions,
6747		   we can't do this optimization.  */
6748		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6749		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6750			 == MODE_INT)
6751			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6752			    <= HOST_BITS_PER_WIDE_INT))))
6753	      {
6754		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6755		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6756		  {
6757		    HOST_WIDE_INT bitsize
6758		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6759
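		    /* E.g. an unsigned 3-bit field is masked with
		       ((HOST_WIDE_INT) 1 << 3) - 1 == 7; a signed one is
		       shifted left and then arithmetically right by
		       GET_MODE_BITSIZE (imode) - 3 bits to sign-extend it.  */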
6760		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6761		      {
6762			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6763			op0 = expand_and (op0, op1, target);
6764		      }
6765		    else
6766		      {
6767			enum machine_mode imode
6768			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6769			tree count
6770			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6771					 0);
6772
6773			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6774					    target, 0);
6775			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6776					    target, 0);
6777		      }
6778		  }
6779
6780		return op0;
6781	      }
6782	}
6783
6784      {
6785	enum machine_mode mode1;
6786	HOST_WIDE_INT bitsize, bitpos;
6787	tree offset;
6788	int volatilep = 0;
6789	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6790					&mode1, &unsignedp, &volatilep);
6791	rtx orig_op0;
6792
6793	/* If we got back the original object, something is wrong.  Perhaps
6794	   we are evaluating an expression too early.  In any event, don't
6795	   infinitely recurse.  */
6796	if (tem == exp)
6797	  abort ();
6798
6799	/* If TEM's type is a union of variable size, pass TARGET to the inner
6800	   computation, since it will need a temporary and TARGET is known
6801	   to suffice.  This occurs in unchecked conversion in Ada.  */
6802
6803	orig_op0 = op0
6804	  = expand_expr (tem,
6805			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6806			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6807			      != INTEGER_CST)
6808			  ? target : NULL_RTX),
6809			 VOIDmode,
6810			 (modifier == EXPAND_INITIALIZER
6811			  || modifier == EXPAND_CONST_ADDRESS)
6812			 ? modifier : EXPAND_NORMAL);
6813
6814	/* If this is a constant, put it into a register if it is a
6815	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
6816	if (CONSTANT_P (op0))
6817	  {
6818	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6819	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6820		&& offset == 0)
6821	      op0 = force_reg (mode, op0);
6822	    else
6823	      op0 = validize_mem (force_const_mem (mode, op0));
6824	  }
6825
6826	if (offset != 0)
6827	  {
6828	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6829
6830	    /* If this object is in a register, put it into memory.
6831	       This case can't occur in C, but can in Ada if we have
6832	       unchecked conversion of an expression from a scalar type to
6833	       an array or record type.  */
6834	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6835		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6836	      {
6837		/* If the operand is a SAVE_EXPR, we can deal with this by
6838		   forcing the SAVE_EXPR into memory.  */
6839		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6840		  {
6841		    put_var_into_stack (TREE_OPERAND (exp, 0));
6842		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6843		  }
6844		else
6845		  {
6846		    tree nt
6847		      = build_qualified_type (TREE_TYPE (tem),
6848					      (TYPE_QUALS (TREE_TYPE (tem))
6849					       | TYPE_QUAL_CONST));
6850		    rtx memloc = assign_temp (nt, 1, 1, 1);
6851
6852		    emit_move_insn (memloc, op0);
6853		    op0 = memloc;
6854		  }
6855	      }
6856
6857	    if (GET_CODE (op0) != MEM)
6858	      abort ();
6859
6860	    if (GET_MODE (offset_rtx) != ptr_mode)
6861	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6862
6863#ifdef POINTERS_EXTEND_UNSIGNED
6864	    if (GET_MODE (offset_rtx) != Pmode)
6865	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
6866#endif
6867
6868	    /* A constant address in OP0 can have VOIDmode; we must not try
6869	       to call force_reg in that case, so avoid it here.  */
6870	    if (GET_CODE (op0) == MEM
6871		&& GET_MODE (op0) == BLKmode
6872		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6873		&& bitsize != 0
6874		&& (bitpos % bitsize) == 0
6875		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6876		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6877	      {
6878		rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6879
6880		if (GET_CODE (XEXP (temp, 0)) == REG)
6881		  op0 = temp;
6882		else
6883		  op0 = (replace_equiv_address
6884			 (op0,
6885			  force_reg (GET_MODE (XEXP (temp, 0)),
6886				     XEXP (temp, 0))));
6887		bitpos = 0;
6888	      }
6889
6890	    op0 = offset_address (op0, offset_rtx,
6891				  highest_pow2_factor (offset));
6892	  }
6893
6894	/* Don't forget about volatility even if this is a bitfield.  */
6895	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6896	  {
6897	    if (op0 == orig_op0)
6898	      op0 = copy_rtx (op0);
6899
6900	    MEM_VOLATILE_P (op0) = 1;
6901	  }
6902
6903	/* In cases where an aligned union has an unaligned object
6904	   as a field, we might be extracting a BLKmode value from
6905	   an integer-mode (e.g., SImode) object.  Handle this case
6906	   by doing the extract into an object as wide as the field
6907	   (which we know to be the width of a basic mode), then
6908	   storing into memory, and changing the mode to BLKmode.  */
6909	if (mode1 == VOIDmode
6910	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6911	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
6912		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6913		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6914		&& modifier != EXPAND_CONST_ADDRESS
6915		&& modifier != EXPAND_INITIALIZER)
6916	    /* If the field isn't aligned enough to fetch as a memref,
6917	       fetch it as a bit field.  */
6918	    || (mode1 != BLKmode
6919		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6920		&& ((TYPE_ALIGN (TREE_TYPE (tem))
6921		     < GET_MODE_ALIGNMENT (mode))
6922		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6923	    /* If the type and the field have a constant size and the
6924	       size of the type isn't the same as that of the bitfield,
6925	       we must use bitfield operations.  */
6926	    || (bitsize >= 0
6927		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6928		    == INTEGER_CST)
6929		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6930					  bitsize)))
6931	  {
6932	    enum machine_mode ext_mode = mode;
6933
6934	    if (ext_mode == BLKmode
6935		&& ! (target != 0 && GET_CODE (op0) == MEM
6936		      && GET_CODE (target) == MEM
6937		      && bitpos % BITS_PER_UNIT == 0))
6938	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6939
6940	    if (ext_mode == BLKmode)
6941	      {
6942		/* In this case, BITPOS must start at a byte boundary and
6943		   TARGET, if specified, must be a MEM.  */
6944		if (GET_CODE (op0) != MEM
6945		    || (target != 0 && GET_CODE (target) != MEM)
6946		    || bitpos % BITS_PER_UNIT != 0)
6947		  abort ();
6948
6949		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6950		if (target == 0)
6951		  target = assign_temp (type, 0, 1, 1);
6952
6953		emit_block_move (target, op0,
6954				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6955					  / BITS_PER_UNIT));
6956
6957		return target;
6958	      }
6959
6960	    op0 = validize_mem (op0);
6961
6962	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6963	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6964
6965	    op0 = extract_bit_field (op0, bitsize, bitpos,
6966				     unsignedp, target, ext_mode, ext_mode,
6967				     int_size_in_bytes (TREE_TYPE (tem)));
6968
6969	    /* If the result is a record type and BITSIZE is narrower than
6970	       the mode of OP0, an integral mode, and this is a big endian
6971	       machine, we must put the field into the high-order bits.  */
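	    /* For instance, an 8-bit field extracted into a 32-bit integral
	       mode would be shifted left by 24 bits here, into the
	       high-order byte.  */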
6972	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6973		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6974		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6975	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6976				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6977					    - bitsize),
6978				  op0, 1);
6979
6980	    if (mode == BLKmode)
6981	      {
6982		rtx new = assign_temp (build_qualified_type
6983				       (type_for_mode (ext_mode, 0),
6984					TYPE_QUAL_CONST), 0, 1, 1);
6985
6986		emit_move_insn (new, op0);
6987		op0 = copy_rtx (new);
6988		PUT_MODE (op0, BLKmode);
6989		set_mem_attributes (op0, exp, 1);
6990	      }
6991
6992	    return op0;
6993	  }
6994
6995	/* If the result is BLKmode, use that to access the object
6996	   now as well.  */
6997	if (mode == BLKmode)
6998	  mode1 = BLKmode;
6999
7000	/* Get a reference to just this component.  */
7001	if (modifier == EXPAND_CONST_ADDRESS
7002	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7003	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7004	else
7005	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7006
7007	if (op0 == orig_op0)
7008	  op0 = copy_rtx (op0);
7009
7010	set_mem_attributes (op0, exp, 0);
7011	if (GET_CODE (XEXP (op0, 0)) == REG)
7012	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7013
7014	MEM_VOLATILE_P (op0) |= volatilep;
7015	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7016	    || modifier == EXPAND_CONST_ADDRESS
7017	    || modifier == EXPAND_INITIALIZER)
7018	  return op0;
7019	else if (target == 0)
7020	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7021
7022	convert_move (target, op0, unsignedp);
7023	return target;
7024      }
7025
7026    case VTABLE_REF:
7027      {
7028	rtx insn, before = get_last_insn (), vtbl_ref;
7029
7030	/* Evaluate the interior expression.  */
7031	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7032				 tmode, modifier);
7033
7034	/* Get or create an instruction off which to hang a note.  */
7035	if (REG_P (subtarget))
7036	  {
7037	    target = subtarget;
7038	    insn = get_last_insn ();
7039	    if (insn == before)
7040	      abort ();
7041	    if (! INSN_P (insn))
7042	      insn = prev_nonnote_insn (insn);
7043	  }
7044	else
7045	  {
7046	    target = gen_reg_rtx (GET_MODE (subtarget));
7047	    insn = emit_move_insn (target, subtarget);
7048	  }
7049
7050	/* Collect the data for the note.  */
7051	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7052	vtbl_ref = plus_constant (vtbl_ref,
7053				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7054	/* Discard the initial CONST that was added.  */
7055	vtbl_ref = XEXP (vtbl_ref, 0);
7056
7057	REG_NOTES (insn)
7058	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7059
7060	return target;
7061      }
7062
7063      /* Intended for a reference to a buffer of a file-object in Pascal.
7064	 But it's not certain that a special tree code will really be
7065	 necessary for these.  INDIRECT_REF might work for them.  */
7066    case BUFFER_REF:
7067      abort ();
7068
7069    case IN_EXPR:
7070      {
7071	/* Pascal set IN expression.
7072
7073	   Algorithm:
7074	       rlo       = set_low - (set_low%bits_per_word);
7075	       the_word  = set [ (index - rlo)/bits_per_word ];
7076	       bit_index = index % bits_per_word;
7077	       bitmask   = 1 << bit_index;
7078	       return !!(the_word & bitmask);  */
7079
7080	tree set = TREE_OPERAND (exp, 0);
7081	tree index = TREE_OPERAND (exp, 1);
7082	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7083	tree set_type = TREE_TYPE (set);
7084	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7085	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7086	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7087	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7088	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7089	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7090	rtx setaddr = XEXP (setval, 0);
7091	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7092	rtx rlow;
7093	rtx diff, quo, rem, addr, bit, result;
7094
7095	/* If domain is empty, answer is no.  Likewise if index is constant
7096	   and out of bounds.  */
7097	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7098	     && TREE_CODE (set_low_bound) == INTEGER_CST
7099	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7100	     || (TREE_CODE (index) == INTEGER_CST
7101		 && TREE_CODE (set_low_bound) == INTEGER_CST
7102		 && tree_int_cst_lt (index, set_low_bound))
7103	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7104		 && TREE_CODE (index) == INTEGER_CST
7105		 && tree_int_cst_lt (set_high_bound, index))))
7106	  return const0_rtx;
7107
7108	if (target == 0)
7109	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7110
7111	/* If we get here, we have to generate the code for both cases
7112	   (in range and out of range).  */
7113
7114	op0 = gen_label_rtx ();
7115	op1 = gen_label_rtx ();
7116
7117	if (! (GET_CODE (index_val) == CONST_INT
7118	       && GET_CODE (lo_r) == CONST_INT))
7119	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7120				   GET_MODE (index_val), iunsignedp, op1);
7121
7122	if (! (GET_CODE (index_val) == CONST_INT
7123	       && GET_CODE (hi_r) == CONST_INT))
7124	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7125				   GET_MODE (index_val), iunsignedp, op1);
7126
7127	/* Calculate the element number of bit zero in the first word
7128	   of the set.  */
7129	if (GET_CODE (lo_r) == CONST_INT)
7130	  rlow = GEN_INT (INTVAL (lo_r)
7131			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7132	else
7133	  rlow = expand_binop (index_mode, and_optab, lo_r,
7134			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7135			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7136
7137	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7138			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7139
7140	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7141			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7142	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7143			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7144
7145	addr = memory_address (byte_mode,
7146			       expand_binop (index_mode, add_optab, diff,
7147					     setaddr, NULL_RTX, iunsignedp,
7148					     OPTAB_LIB_WIDEN));
7149
7150	/* Extract the bit we want to examine.  */
7151	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7152			    gen_rtx_MEM (byte_mode, addr),
7153			    make_tree (TREE_TYPE (index), rem),
7154			    NULL_RTX, 1);
7155	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7156			       GET_MODE (target) == byte_mode ? target : 0,
7157			       1, OPTAB_LIB_WIDEN);
7158
7159	if (result != target)
7160	  convert_move (target, result, 1);
7161
7162	/* Output the code to handle the out-of-range case.  */
7163	emit_jump (op0);
7164	emit_label (op1);
7165	emit_move_insn (target, const0_rtx);
7166	emit_label (op0);
7167	return target;
7168      }
7169
7170    case WITH_CLEANUP_EXPR:
7171      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7172	{
7173	  WITH_CLEANUP_EXPR_RTL (exp)
7174	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7175	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7176
7177	  /* That's it for this cleanup.  */
7178	  TREE_OPERAND (exp, 1) = 0;
7179	}
7180      return WITH_CLEANUP_EXPR_RTL (exp);
7181
7182    case CLEANUP_POINT_EXPR:
7183      {
7184	/* Start a new binding layer that will keep track of all cleanup
7185	   actions to be performed.  */
7186	expand_start_bindings (2);
7187
7188	target_temp_slot_level = temp_slot_level;
7189
7190	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7191	/* If we're going to use this value, load it up now.  */
7192	if (! ignore)
7193	  op0 = force_not_mem (op0);
7194	preserve_temp_slots (op0);
7195	expand_end_bindings (NULL_TREE, 0, 0);
7196      }
7197      return op0;
7198
7199    case CALL_EXPR:
7200      /* Check for a built-in function.  */
7201      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7202	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7203	      == FUNCTION_DECL)
7204	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7205        {
7206	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7207	      == BUILT_IN_FRONTEND)
7208	    return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7209	  else
7210	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7211	}
7212
7213      return expand_call (exp, target, ignore);
7214
7215    case NON_LVALUE_EXPR:
7216    case NOP_EXPR:
7217    case CONVERT_EXPR:
7218    case REFERENCE_EXPR:
7219      if (TREE_OPERAND (exp, 0) == error_mark_node)
7220	return const0_rtx;
7221
7222      if (TREE_CODE (type) == UNION_TYPE)
7223	{
7224	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7225
7226	  /* If both input and output are BLKmode, this conversion isn't doing
7227	     anything except possibly changing the memory attributes.  */
7228	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7229	    {
7230	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7231					modifier);
7232
7233	      result = copy_rtx (result);
7234	      set_mem_attributes (result, exp, 0);
7235	      return result;
7236	    }
7237
7238	  if (target == 0)
7239	    target = assign_temp (type, 0, 1, 1);
7240
7241	  if (GET_CODE (target) == MEM)
7242	    /* Store data into beginning of memory target.  */
7243	    store_expr (TREE_OPERAND (exp, 0),
7244			adjust_address (target, TYPE_MODE (valtype), 0), 0);
7245
7246	  else if (GET_CODE (target) == REG)
7247	    /* Store this field into a union of the proper type.  */
7248	    store_field (target,
7249			 MIN ((int_size_in_bytes (TREE_TYPE
7250						  (TREE_OPERAND (exp, 0)))
7251			       * BITS_PER_UNIT),
7252			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7253			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7254			 VOIDmode, 0, type, 0);
7255	  else
7256	    abort ();
7257
7258	  /* Return the entire union.  */
7259	  return target;
7260	}
7261
7262      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7263	{
7264	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7265			     modifier);
7266
7267	  /* If the signedness of the conversion differs and OP0 is
7268	     a promoted SUBREG, clear that indication since we now
7269	     have to do the proper extension.  */
7270	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7271	      && GET_CODE (op0) == SUBREG)
7272	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7273
7274	  return op0;
7275	}
7276
7277      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7278      if (GET_MODE (op0) == mode)
7279	return op0;
7280
7281      /* If OP0 is a constant, just convert it into the proper mode.  */
7282      if (CONSTANT_P (op0))
7283	return
7284	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7285			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7286
7287      if (modifier == EXPAND_INITIALIZER)
7288	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7289
7290      if (target == 0)
7291	return
7292	  convert_to_mode (mode, op0,
7293			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7294      else
7295	convert_move (target, op0,
7296		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7297      return target;
7298
7299    case VIEW_CONVERT_EXPR:
7300      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7301
7302      /* If the input and output modes are both the same, we are done.
7303	 Otherwise, if neither mode is BLKmode and both are within a word, we
7304	 can use gen_lowpart.  If neither is true, make sure the operand is
7305	 in memory and convert the MEM to the new mode.  */
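      /* E.g. on a typical target, viewing an SFmode value as an SImode
	 integer goes through gen_lowpart, since both modes fit in a word.  */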
7306      if (TYPE_MODE (type) == GET_MODE (op0))
7307	;
7308      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7309	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7310	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7311	op0 = gen_lowpart (TYPE_MODE (type), op0);
7312      else if (GET_CODE (op0) != MEM)
7313	{
7314	  /* If the operand is not a MEM, force it into memory.  Since we
7315	     are going to be changing the mode of the MEM, don't call
7316	     force_const_mem for constants because we don't allow pool
7317	     constants to change mode.  */
7318	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7319
7320	  if (TREE_ADDRESSABLE (exp))
7321	    abort ();
7322
7323	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7324	    target
7325	      = assign_stack_temp_for_type
7326		(TYPE_MODE (inner_type),
7327		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7328
7329	  emit_move_insn (target, op0);
7330	  op0 = target;
7331	}
7332
7333      /* At this point, OP0 is in the correct mode.  If the output type is such
7334	 that the operand is known to be aligned, indicate that it is.
7335	 Otherwise, we need only be concerned about alignment for non-BLKmode
7336	 results.  */
7337      if (GET_CODE (op0) == MEM)
7338	{
7339	  op0 = copy_rtx (op0);
7340
7341	  if (TYPE_ALIGN_OK (type))
7342	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7343	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7344		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7345	    {
7346	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7347	      HOST_WIDE_INT temp_size
7348		= MAX (int_size_in_bytes (inner_type),
7349		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7350	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7351						    temp_size, 0, type);
7352	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7353
7354	      if (TREE_ADDRESSABLE (exp))
7355		abort ();
7356
7357	      if (GET_MODE (op0) == BLKmode)
7358		emit_block_move (new_with_op0_mode, op0,
7359				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7360	      else
7361		emit_move_insn (new_with_op0_mode, op0);
7362
7363	      op0 = new;
7364	    }
7365
7366	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7367	}
7368
7369      return op0;
7370
7371    case PLUS_EXPR:
7372      /* We come here from MINUS_EXPR when the second operand is a
7373         constant.  */
7374    plus_expr:
7375      this_optab = ! unsignedp && flag_trapv
7376                   && (GET_MODE_CLASS (mode) == MODE_INT)
7377                   ? addv_optab : add_optab;
7378
7379      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7380	 something else, make sure we add the register to the constant and
7381	 then to the other thing.  This case can occur during strength
7382	 reduction and doing it this way will produce better code if the
7383	 frame pointer or argument pointer is eliminated.
7384
7385	 fold-const.c will ensure that the constant is always in the inner
7386	 PLUS_EXPR, so the only case we need to do anything about is if
7387	 sp, ap, or fp is our second argument, in which case we must swap
7388	 the innermost first argument and our second argument.  */
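      /* E.g. (X + 4) + FP is rearranged into (FP + 4) + X here, so that the
	 register and the constant can later be combined when the frame
	 pointer is eliminated.  */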
7389
7390      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7391	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7392	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7393	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7394	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7395	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7396	{
7397	  tree t = TREE_OPERAND (exp, 1);
7398
7399	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7400	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7401	}
7402
7403      /* If the result is to be ptr_mode and we are adding an integer to
7404	 something, we might be forming a constant.  So try to use
7405	 plus_constant.  If it produces a sum and we can't accept it,
7406	 use force_operand.  This allows P = &ARR[const] to generate
7407	 efficient code on machines where a SYMBOL_REF is not a valid
7408	 address.
7409
7410	 If this is an EXPAND_SUM call, always return the sum.  */
7411      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7412          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7413	{
7414	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7415	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7416	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7417	    {
7418	      rtx constant_part;
7419
7420	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7421				 EXPAND_SUM);
7422	      /* Use immed_double_const to ensure that the constant is
7423		 truncated according to the mode of OP1, then sign extended
7424		 to a HOST_WIDE_INT.  Using the constant directly can result
7425		 in non-canonical RTL in a 64x32 cross compile.  */
7426	      constant_part
7427		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7428				      (HOST_WIDE_INT) 0,
7429				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7430	      op1 = plus_constant (op1, INTVAL (constant_part));
7431	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7432		op1 = force_operand (op1, target);
7433	      return op1;
7434	    }
7435
7436	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7437		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7438		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7439	    {
7440	      rtx constant_part;
7441
7442	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7443				 EXPAND_SUM);
7444	      if (! CONSTANT_P (op0))
7445		{
7446		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7447				     VOIDmode, modifier);
7448		  /* Don't go to both_summands if modifier
7449		     says it's not right to return a PLUS.  */
7450		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7451		    goto binop2;
7452		  goto both_summands;
7453		}
7454	      /* Use immed_double_const to ensure that the constant is
7455		 truncated according to the mode of OP0, then sign extended
7456		 to a HOST_WIDE_INT.  Using the constant directly can result
7457		 in non-canonical RTL in a 64x32 cross compile.  */
7458	      constant_part
7459		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7460				      (HOST_WIDE_INT) 0,
7461				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7462	      op0 = plus_constant (op0, INTVAL (constant_part));
7463	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7464		op0 = force_operand (op0, target);
7465	      return op0;
7466	    }
7467	}
7468
7469      /* No sense saving up arithmetic to be done
7470	 if it's all in the wrong mode to form part of an address.
7471	 And force_operand won't know whether to sign-extend or
7472	 zero-extend.  */
7473      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7474	  || mode != ptr_mode)
7475	goto binop;
7476
7477      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7478	subtarget = 0;
7479
7480      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7481      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7482
7483    both_summands:
7484      /* Make sure any term that's a sum with a constant comes last.  */
7485      if (GET_CODE (op0) == PLUS
7486	  && CONSTANT_P (XEXP (op0, 1)))
7487	{
7488	  temp = op0;
7489	  op0 = op1;
7490	  op1 = temp;
7491	}
7492      /* If adding to a sum including a constant,
7493	 associate it to put the constant outside.  */
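      /* E.g. if the operands expanded to y and (x + 8), the code below
	 forms (x + y) + 8 so that the constant stays outermost.  */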
7494      if (GET_CODE (op1) == PLUS
7495	  && CONSTANT_P (XEXP (op1, 1)))
7496	{
7497	  rtx constant_term = const0_rtx;
7498
7499	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7500	  if (temp != 0)
7501	    op0 = temp;
7502	  /* Ensure that MULT comes first if there is one.  */
7503	  else if (GET_CODE (op0) == MULT)
7504	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7505	  else
7506	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7507
7508	  /* Let's also eliminate constants from op0 if possible.  */
7509	  op0 = eliminate_constant_term (op0, &constant_term);
7510
7511	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7512	     their sum should be a constant.  Form it into OP1, since the
7513	     result we want will then be OP0 + OP1.  */
7514
7515	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7516					    XEXP (op1, 1));
7517	  if (temp != 0)
7518	    op1 = temp;
7519	  else
7520	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7521	}
7522
7523      /* Put a constant term last and put a multiplication first.  */
7524      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7525	temp = op1, op1 = op0, op0 = temp;
7526
7527      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7528      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7529
7530    case MINUS_EXPR:
7531      /* For initializers, we are allowed to return a MINUS of two
7532	 symbolic constants.  Here we handle all cases when both operands
7533	 are constant.  */
7534      /* Handle difference of two symbolic constants,
7535	 for the sake of an initializer.  */
7536      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7537	  && really_constant_p (TREE_OPERAND (exp, 0))
7538	  && really_constant_p (TREE_OPERAND (exp, 1)))
7539	{
7540	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7541				 modifier);
7542	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7543				 modifier);
7544
7545	  /* If the last operand is a CONST_INT, use plus_constant of
7546	     the negated constant.  Else make the MINUS.  */
7547	  if (GET_CODE (op1) == CONST_INT)
7548	    return plus_constant (op0, - INTVAL (op1));
7549	  else
7550	    return gen_rtx_MINUS (mode, op0, op1);
7551	}
7552      /* Convert A - const to A + (-const).  */
7553      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7554	{
7555	  tree negated = fold (build1 (NEGATE_EXPR, type,
7556				       TREE_OPERAND (exp, 1)));
7557
7558	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7559	    /* If we can't negate the constant in TYPE, leave it alone and
7560	       expand_binop will negate it for us.  We used to try to do it
7561	       here in the signed version of TYPE, but that doesn't work
7562	       on POINTER_TYPEs.  */;
7563	  else
7564	    {
7565	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7566	      goto plus_expr;
7567	    }
7568	}
7569      this_optab = ! unsignedp && flag_trapv
7570                   && (GET_MODE_CLASS (mode) == MODE_INT)
7571                   ? subv_optab : sub_optab;
7572      goto binop;
7573
7574    case MULT_EXPR:
7575      /* If first operand is constant, swap them.
7576	 Thus the following special case checks need only
7577	 check the second operand.  */
7578      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7579	{
7580	  tree t1 = TREE_OPERAND (exp, 0);
7581	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7582	  TREE_OPERAND (exp, 1) = t1;
7583	}
7584
7585      /* Attempt to return something suitable for generating an
7586	 indexed address, for machines that support that.  */
7587
7588      if (modifier == EXPAND_SUM && mode == ptr_mode
7589	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7590	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7591	{
7592	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7593			     EXPAND_SUM);
7594
7595	  /* Apply distributive law if OP0 is x+c.  */
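	  /* E.g. (x + 4) * 3 becomes x*3 + 12, which is more likely to be
	     usable as part of an address.  */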
7596	  if (GET_CODE (op0) == PLUS
7597	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7598	    return
7599	      gen_rtx_PLUS
7600		(mode,
7601		 gen_rtx_MULT
7602		 (mode, XEXP (op0, 0),
7603		  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7604		 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7605			  * INTVAL (XEXP (op0, 1))));
7606
7607	  if (GET_CODE (op0) != REG)
7608	    op0 = force_operand (op0, NULL_RTX);
7609	  if (GET_CODE (op0) != REG)
7610	    op0 = copy_to_mode_reg (mode, op0);
7611
7612	  return
7613	    gen_rtx_MULT (mode, op0,
7614			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7615	}
7616
7617      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7618	subtarget = 0;
7619
7620      /* Check for multiplying things that have been extended
7621	 from a narrower type.  If this machine supports multiplying
7622	 in that narrower type with a result in the desired type,
7623	 do it that way, and avoid the explicit type-conversion.  */
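      /* For instance, on a machine that provides a mulhisi3 pattern,
	 multiplying two HImode values that were widened to SImode can be
	 done with one widening multiply instead of two extensions and an
	 SImode multiply.  */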
7624      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7625	  && TREE_CODE (type) == INTEGER_TYPE
7626	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7627	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7628	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7629	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7630				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7631	       /* Don't use a widening multiply if a shift will do.  */
7632	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7633		    > HOST_BITS_PER_WIDE_INT)
7634		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7635	      ||
7636	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7637	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7638		   ==
7639		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7640	       /* If both operands are extended, they must either both
7641		  be zero-extended or both be sign-extended.  */
7642	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7643		   ==
7644		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7645	{
7646	  enum machine_mode innermode
7647	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7648	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7649			? smul_widen_optab : umul_widen_optab);
7650	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7651			? umul_widen_optab : smul_widen_optab);
7652	  if (mode == GET_MODE_WIDER_MODE (innermode))
7653	    {
7654	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7655		{
7656		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7657				     NULL_RTX, VOIDmode, 0);
7658		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7659		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7660				       VOIDmode, 0);
7661		  else
7662		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7663				       NULL_RTX, VOIDmode, 0);
7664		  goto binop2;
7665		}
7666	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7667		       && innermode == word_mode)
7668		{
7669		  rtx htem;
7670		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7671				     NULL_RTX, VOIDmode, 0);
7672		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7673		    op1 = convert_modes (innermode, mode,
7674					 expand_expr (TREE_OPERAND (exp, 1),
7675						      NULL_RTX, VOIDmode, 0),
7676					 unsignedp);
7677		  else
7678		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7679				       NULL_RTX, VOIDmode, 0);
7680		  temp = expand_binop (mode, other_optab, op0, op1, target,
7681				       unsignedp, OPTAB_LIB_WIDEN);
7682		  htem = expand_mult_highpart_adjust (innermode,
7683						      gen_highpart (innermode, temp),
7684						      op0, op1,
7685						      gen_highpart (innermode, temp),
7686						      unsignedp);
7687		  emit_move_insn (gen_highpart (innermode, temp), htem);
7688		  return temp;
7689		}
7690	    }
7691	}
7692      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7693      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7694      return expand_mult (mode, op0, op1, target, unsignedp);
7695
7696    case TRUNC_DIV_EXPR:
7697    case FLOOR_DIV_EXPR:
7698    case CEIL_DIV_EXPR:
7699    case ROUND_DIV_EXPR:
7700    case EXACT_DIV_EXPR:
7701      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7702	subtarget = 0;
7703      /* Possible optimization: compute the dividend with EXPAND_SUM;
7704	 then, if the divisor is constant, we can optimize the case
7705	 where some terms of the dividend have coefficients divisible by it.  */
7706      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7707      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7708      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7709
7710    case RDIV_EXPR:
7711      /* Emit a/b as a*(1/b).  Later, CSE may be able to share the reciprocal,
7712         saving an expensive divide.  If not, combine will rebuild the
7713         original computation.  */
7714      if (flag_unsafe_math_optimizations && optimize && !optimize_size
7715	  && !real_onep (TREE_OPERAND (exp, 0)))
7716        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7717				   build (RDIV_EXPR, type,
7718					  build_real (type, dconst1),
7719					  TREE_OPERAND (exp, 1))),
7720			    target, tmode, unsignedp);
7721      this_optab = sdiv_optab;
7722      goto binop;
7723
7724    case TRUNC_MOD_EXPR:
7725    case FLOOR_MOD_EXPR:
7726    case CEIL_MOD_EXPR:
7727    case ROUND_MOD_EXPR:
7728      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7729	subtarget = 0;
7730      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7731      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7732      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7733
7734    case FIX_ROUND_EXPR:
7735    case FIX_FLOOR_EXPR:
7736    case FIX_CEIL_EXPR:
7737      abort ();			/* Not used for C.  */
7738
7739    case FIX_TRUNC_EXPR:
7740      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7741      if (target == 0)
7742	target = gen_reg_rtx (mode);
7743      expand_fix (target, op0, unsignedp);
7744      return target;
7745
7746    case FLOAT_EXPR:
7747      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7748      if (target == 0)
7749	target = gen_reg_rtx (mode);
7750      /* expand_float can't figure out what to do if FROM has VOIDmode.
7751	 So give it the correct mode.  With -O, cse will optimize this.  */
7752      if (GET_MODE (op0) == VOIDmode)
7753	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7754				op0);
7755      expand_float (target, op0,
7756		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7757      return target;
7758
7759    case NEGATE_EXPR:
7760      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7761      temp = expand_unop (mode,
7762                          ! unsignedp && flag_trapv
7763                          && (GET_MODE_CLASS (mode) == MODE_INT)
7764                          ? negv_optab : neg_optab, op0, target, 0);
7765      if (temp == 0)
7766	abort ();
7767      return temp;
7768
7769    case ABS_EXPR:
7770      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7771
7772      /* Handle complex values specially.  */
7773      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7774	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7775	return expand_complex_abs (mode, op0, target, unsignedp);
7776
7777      /* Unsigned abs is simply the operand.  Testing here means we don't
7778	 risk generating incorrect code below.  */
7779      if (TREE_UNSIGNED (type))
7780	return op0;
7781
7782      return expand_abs (mode, op0, target, unsignedp,
7783			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7784
7785    case MAX_EXPR:
7786    case MIN_EXPR:
7787      target = original_target;
7788      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7789	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7790	  || GET_MODE (target) != mode
7791	  || (GET_CODE (target) == REG
7792	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7793	target = gen_reg_rtx (mode);
7794      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7795      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7796
7797      /* First try to do it with a special MIN or MAX instruction.
7798	 If that does not win, use a conditional jump to select the proper
7799	 value.  */
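      /* The jump sequence for MAX_EXPR is roughly
	     target = op0; if (target >= op1) goto lab; target = op1; lab:
	 and the mirror image (using <=) for MIN_EXPR.  */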
7800      this_optab = (TREE_UNSIGNED (type)
7801		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7802		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7803
7804      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7805			   OPTAB_WIDEN);
7806      if (temp != 0)
7807	return temp;
7808
7809      /* At this point, a MEM target is no longer useful; we will get better
7810	 code without it.  */
7811
7812      if (GET_CODE (target) == MEM)
7813	target = gen_reg_rtx (mode);
7814
7815      if (target != op0)
7816	emit_move_insn (target, op0);
7817
7818      op0 = gen_label_rtx ();
7819
7820      /* If this is an integer mode too wide to be compared directly,
7821	 compare word by word.  Rely on cse to optimize constant cases.  */
7822      if (GET_MODE_CLASS (mode) == MODE_INT
7823	  && ! can_compare_p (GE, mode, ccp_jump))
7824	{
7825	  if (code == MAX_EXPR)
7826	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7827					  target, op1, NULL_RTX, op0);
7828	  else
7829	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7830					  op1, target, NULL_RTX, op0);
7831	}
7832      else
7833	{
7834	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7835	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7836				   unsignedp, mode, NULL_RTX, NULL_RTX,
7837				   op0);
7838	}
7839      emit_move_insn (target, op1);
7840      emit_label (op0);
7841      return target;
7842
7843    case BIT_NOT_EXPR:
7844      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7845      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7846      if (temp == 0)
7847	abort ();
7848      return temp;
7849
7850    case FFS_EXPR:
7851      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7852      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7853      if (temp == 0)
7854	abort ();
7855      return temp;
7856
7857      /* ??? Can optimize bitwise operations with one arg constant.
7858	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7859	 and (a bitwise1 b) bitwise2 b (etc)
7860	 but that is probably not worth while.  */
7861	 but that is probably not worthwhile.  */
7862      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7863	 boolean values when we want in all cases to compute both of them.  In
7864	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7865	 as actual zero-or-1 values and then bitwise anding.  In cases where
7866	 there cannot be any side effects, better code would be made by
7867	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7868	 how to recognize those cases.  */
7869
7870    case TRUTH_AND_EXPR:
7871    case BIT_AND_EXPR:
7872      this_optab = and_optab;
7873      goto binop;
7874
7875    case TRUTH_OR_EXPR:
7876    case BIT_IOR_EXPR:
7877      this_optab = ior_optab;
7878      goto binop;
7879
7880    case TRUTH_XOR_EXPR:
7881    case BIT_XOR_EXPR:
7882      this_optab = xor_optab;
7883      goto binop;
7884
7885    case LSHIFT_EXPR:
7886    case RSHIFT_EXPR:
7887    case LROTATE_EXPR:
7888    case RROTATE_EXPR:
7889      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7890	subtarget = 0;
7891      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7892      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7893			   unsignedp);
7894
7895      /* Could determine the answer when only additive constants differ.  Also,
7896	 the addition of one can be handled by changing the condition.  */
7897    case LT_EXPR:
7898    case LE_EXPR:
7899    case GT_EXPR:
7900    case GE_EXPR:
7901    case EQ_EXPR:
7902    case NE_EXPR:
7903    case UNORDERED_EXPR:
7904    case ORDERED_EXPR:
7905    case UNLT_EXPR:
7906    case UNLE_EXPR:
7907    case UNGT_EXPR:
7908    case UNGE_EXPR:
7909    case UNEQ_EXPR:
7910      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7911      if (temp != 0)
7912	return temp;
7913
7914      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
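      /* Roughly: temp = foo; if (temp == 0) goto lab; temp = 1; lab:
	 so any nonzero value is canonicalized to 1.  */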
7915      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7916	  && original_target
7917	  && GET_CODE (original_target) == REG
7918	  && (GET_MODE (original_target)
7919	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7920	{
7921	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7922			      VOIDmode, 0);
7923
7924	  if (temp != original_target)
7925	    temp = copy_to_reg (temp);
7926
7927	  op1 = gen_label_rtx ();
7928	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7929				   GET_MODE (temp), unsignedp, op1);
7930	  emit_move_insn (temp, const1_rtx);
7931	  emit_label (op1);
7932	  return temp;
7933	}
7934
7935      /* If no set-flag instruction, must generate a conditional
7936	 store into a temporary variable.  Drop through
7937	 and handle this like && and ||.  */
7938
7939    case TRUTH_ANDIF_EXPR:
7940    case TRUTH_ORIF_EXPR:
7941      if (! ignore
7942	  && (target == 0 || ! safe_from_p (target, exp, 1)
7943	      /* Make sure we don't have a hard reg (such as the function's return
7944		 value) live across basic blocks, if not optimizing.  */
7945	      || (!optimize && GET_CODE (target) == REG
7946		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7947	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7948
7949      if (target)
7950	emit_clr_insn (target);
7951
7952      op1 = gen_label_rtx ();
7953      jumpifnot (exp, op1);
7954
7955      if (target)
7956	emit_0_to_1_insn (target);
7957
7958      emit_label (op1);
7959      return ignore ? const0_rtx : target;
7960
7961    case TRUTH_NOT_EXPR:
7962      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7963      /* The parser is careful to generate TRUTH_NOT_EXPR
7964	 only with operands that are always zero or one.  */
7965      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7966			   target, 1, OPTAB_LIB_WIDEN);
7967      if (temp == 0)
7968	abort ();
7969      return temp;
7970
7971    case COMPOUND_EXPR:
7972      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7973      emit_queue ();
7974      return expand_expr (TREE_OPERAND (exp, 1),
7975			  (ignore ? const0_rtx : target),
7976			  VOIDmode, 0);
7977
7978    case COND_EXPR:
7979      /* If we would have a "singleton" (see below) were it not for a
7980	 conversion in each arm, bring that conversion back out.  */
7981      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7982	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7983	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7984	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7985	{
7986	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7987	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7988
7989	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7990	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7991	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7992		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7993	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7994		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7995	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7996		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7997	    return expand_expr (build1 (NOP_EXPR, type,
7998					build (COND_EXPR, TREE_TYPE (iftrue),
7999					       TREE_OPERAND (exp, 0),
8000					       iftrue, iffalse)),
8001				target, tmode, modifier);
8002	}
8003
8004      {
8005	/* Note that COND_EXPRs whose type is a structure or union
8006	   are required to be constructed to contain assignments of
8007	   a temporary variable, so that we can evaluate them here
8008	   for side effect only.  If type is void, we must do likewise.  */
8009
8010	/* If an arm of the branch requires a cleanup,
8011	   only that cleanup is performed.  */
8012
8013	tree singleton = 0;
8014	tree binary_op = 0, unary_op = 0;
8015
8016	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8017	   convert it to our mode, if necessary.  */
8018	if (integer_onep (TREE_OPERAND (exp, 1))
8019	    && integer_zerop (TREE_OPERAND (exp, 2))
8020	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8021	  {
8022	    if (ignore)
8023	      {
8024		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8025			     modifier);
8026		return const0_rtx;
8027	      }
8028
8029	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8030	    if (GET_MODE (op0) == mode)
8031	      return op0;
8032
8033	    if (target == 0)
8034	      target = gen_reg_rtx (mode);
8035	    convert_move (target, op0, unsignedp);
8036	    return target;
8037	  }
8038
8039	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8040	   output and conditionally add B.  Similarly for unary operations.
8041	   Don't do this if X has side-effects because those side effects
8042	   might affect A or B and the "?" operation is a sequence point in
8043	   ANSI.  (operand_equal_p tests for side effects.)  */
8044
8045	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8046	    && operand_equal_p (TREE_OPERAND (exp, 2),
8047				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8048	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8049	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8050		 && operand_equal_p (TREE_OPERAND (exp, 1),
8051				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8052	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8053	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8054		 && operand_equal_p (TREE_OPERAND (exp, 2),
8055				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8056	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8057	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8058		 && operand_equal_p (TREE_OPERAND (exp, 1),
8059				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8060	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8061
8062	/* If we are not to produce a result, we have no target.  Otherwise,
8063	   if a target was specified use it; it will not be used as an
8064	   intermediate target unless it is safe.  If no target, use a
8065	   temporary.  */
8066
8067	if (ignore)
8068	  temp = 0;
8069	else if (original_target
8070		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8071		     || (singleton && GET_CODE (original_target) == REG
8072			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8073			 && original_target == var_rtx (singleton)))
8074		 && GET_MODE (original_target) == mode
8075#ifdef HAVE_conditional_move
8076		 && (! can_conditionally_move_p (mode)
8077		     || GET_CODE (original_target) == REG
8078		     || TREE_ADDRESSABLE (type))
8079#endif
8080		 && (GET_CODE (original_target) != MEM
8081		     || TREE_ADDRESSABLE (type)))
8082	  temp = original_target;
8083	else if (TREE_ADDRESSABLE (type))
8084	  abort ();
8085	else
8086	  temp = assign_temp (type, 0, 0, 1);
8087
8088	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8089	   do the test of X as a store-flag operation, do this as
8090	   A + ((X != 0) << log C).  Similarly for other simple binary
8091	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
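	/* For example, `i > 0 ? a + 4 : a' can be done as
	   `a + ((i > 0) << 2)'.  */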
8092	if (temp && singleton && binary_op
8093	    && (TREE_CODE (binary_op) == PLUS_EXPR
8094		|| TREE_CODE (binary_op) == MINUS_EXPR
8095		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8096		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8097	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8098		: integer_onep (TREE_OPERAND (binary_op, 1)))
8099	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8100	  {
8101	    rtx result;
8102	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8103                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8104                               ? addv_optab : add_optab)
8105                            : TREE_CODE (binary_op) == MINUS_EXPR
8106                              ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8107                                 ? subv_optab : sub_optab)
8108                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8109                            : xor_optab);
8110
8111	    /* If we had X ? A : A + 1, do this as A + (X == 0).
8112
8113	       We have to invert the truth value here and then put it
8114	       back later if do_store_flag fails.  We cannot simply copy
8115	       TREE_OPERAND (exp, 0) to another variable and modify that
8116	       because invert_truthvalue can modify the tree pointed to
8117	       by its argument.  */
8118	    if (singleton == TREE_OPERAND (exp, 1))
8119	      TREE_OPERAND (exp, 0)
8120		= invert_truthvalue (TREE_OPERAND (exp, 0));
8121
8122	    result = do_store_flag (TREE_OPERAND (exp, 0),
8123				    (safe_from_p (temp, singleton, 1)
8124				     ? temp : NULL_RTX),
8125				    mode, BRANCH_COST <= 1);
8126
8127	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8128	      result = expand_shift (LSHIFT_EXPR, mode, result,
8129				     build_int_2 (tree_log2
8130						  (TREE_OPERAND
8131						   (binary_op, 1)),
8132						  0),
8133				     (safe_from_p (temp, singleton, 1)
8134				      ? temp : NULL_RTX), 0);
8135
8136	    if (result)
8137	      {
8138		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8139		return expand_binop (mode, boptab, op1, result, temp,
8140				     unsignedp, OPTAB_LIB_WIDEN);
8141	      }
8142	    else if (singleton == TREE_OPERAND (exp, 1))
8143	      TREE_OPERAND (exp, 0)
8144		= invert_truthvalue (TREE_OPERAND (exp, 0));
8145	  }
8146
8147	do_pending_stack_adjust ();
8148	NO_DEFER_POP;
8149	op0 = gen_label_rtx ();
8150
8151	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8152	  {
8153	    if (temp != 0)
8154	      {
8155		/* If the target conflicts with the other operand of the
8156		   binary op, we can't use it.  Also, we can't use the target
8157		   if it is a hard register, because evaluating the condition
8158		   might clobber it.  */
8159		if ((binary_op
8160		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8161		    || (GET_CODE (temp) == REG
8162			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8163		  temp = gen_reg_rtx (mode);
8164		store_expr (singleton, temp, 0);
8165	      }
8166	    else
8167	      expand_expr (singleton,
8168			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8169	    if (singleton == TREE_OPERAND (exp, 1))
8170	      jumpif (TREE_OPERAND (exp, 0), op0);
8171	    else
8172	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8173
8174	    start_cleanup_deferral ();
8175	    if (binary_op && temp == 0)
8176	      /* Just touch the other operand.  */
8177	      expand_expr (TREE_OPERAND (binary_op, 1),
8178			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8179	    else if (binary_op)
8180	      store_expr (build (TREE_CODE (binary_op), type,
8181				 make_tree (type, temp),
8182				 TREE_OPERAND (binary_op, 1)),
8183			  temp, 0);
8184	    else
8185	      store_expr (build1 (TREE_CODE (unary_op), type,
8186				  make_tree (type, temp)),
8187			  temp, 0);
8188	    op1 = op0;
8189	  }
8190	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8191	   comparison operator.  If we have one of these cases, set the
8192	   output to A, branch on A (cse will merge these two references),
8193	   then set the output to FOO.  */
8194	else if (temp
8195		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8196		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8197		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8198				     TREE_OPERAND (exp, 1), 0)
8199		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8200		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8201		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8202	  {
8203	    if (GET_CODE (temp) == REG
8204		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8205	      temp = gen_reg_rtx (mode);
8206	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8207	    jumpif (TREE_OPERAND (exp, 0), op0);
8208
8209	    start_cleanup_deferral ();
8210	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8211	    op1 = op0;
8212	  }
8213	else if (temp
8214		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8215		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8216		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8217				     TREE_OPERAND (exp, 2), 0)
8218		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8219		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8220		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8221	  {
8222	    if (GET_CODE (temp) == REG
8223		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8224	      temp = gen_reg_rtx (mode);
8225	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8226	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8227
8228	    start_cleanup_deferral ();
8229	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8230	    op1 = op0;
8231	  }
8232	else
8233	  {
8234	    op1 = gen_label_rtx ();
8235	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8236
8237	    start_cleanup_deferral ();
8238
8239	    /* One branch of the cond can be void, if it never returns. For
8240	       example A ? throw : E  */
8241	    if (temp != 0
8242		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8243	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
8244	    else
8245	      expand_expr (TREE_OPERAND (exp, 1),
8246			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8247	    end_cleanup_deferral ();
8248	    emit_queue ();
8249	    emit_jump_insn (gen_jump (op1));
8250	    emit_barrier ();
8251	    emit_label (op0);
8252	    start_cleanup_deferral ();
8253	    if (temp != 0
8254		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8255	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
8256	    else
8257	      expand_expr (TREE_OPERAND (exp, 2),
8258			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8259	  }
8260
8261	end_cleanup_deferral ();
8262
8263	emit_queue ();
8264	emit_label (op1);
8265	OK_DEFER_POP;
8266
8267	return temp;
8268      }
8269
8270    case TARGET_EXPR:
8271      {
8272	/* Something needs to be initialized, but we didn't know
8273	   where that thing was when building the tree.  For example,
8274	   it could be the return value of a function, or a parameter
8275	   to a function which is laid out on the stack, or a temporary
8276	   variable which must be passed by reference.
8277
8278	   We guarantee that the expression will either be constructed
8279	   or copied into our original target.  */
8280
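	/* As used below: operand 0 is the slot (a VAR_DECL), operand 1 the
	   initializer, operand 2 an optional cleanup, and operand 3 records
	   the initializer once the node has been expanded.  */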
8281	tree slot = TREE_OPERAND (exp, 0);
8282	tree cleanups = NULL_TREE;
8283	tree exp1;
8284
8285	if (TREE_CODE (slot) != VAR_DECL)
8286	  abort ();
8287
8288	if (! ignore)
8289	  target = original_target;
8290
8291	/* Set this here so that if we get a target that refers to a
8292	   register variable that's already been used, put_reg_into_stack
8293	   knows that it should fix up those uses.  */
8294	TREE_USED (slot) = 1;
8295
8296	if (target == 0)
8297	  {
8298	    if (DECL_RTL_SET_P (slot))
8299	      {
8300		target = DECL_RTL (slot);
8301		/* If we have already expanded the slot, don't do
8302		   it again.  (mrs)  */
8303		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8304		  return target;
8305	      }
8306	    else
8307	      {
8308		target = assign_temp (type, 2, 0, 1);
8309		/* All temp slots at this level must not conflict.  */
8310		preserve_temp_slots (target);
8311		SET_DECL_RTL (slot, target);
8312		if (TREE_ADDRESSABLE (slot))
8313		  put_var_into_stack (slot);
8314
8315		/* Since SLOT is not known to the called function
8316		   to belong to its stack frame, we must build an explicit
8317		   cleanup.  This case occurs when we must build up a reference
8318		   to pass the reference as an argument.  In this case,
8319		   it is very likely that such a reference need not be
8320		   built here.  */
8321
8322		if (TREE_OPERAND (exp, 2) == 0)
8323		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8324		cleanups = TREE_OPERAND (exp, 2);
8325	      }
8326	  }
8327	else
8328	  {
8329	    /* This case does occur when expanding a parameter which
8330	       needs to be constructed on the stack.  The target
8331	       is the actual stack address that we want to initialize.
8332	       The function we call will perform the cleanup in this case.  */
8333
8334	    /* If we have already assigned it space, use that space,
8335	       not target that we were passed in, as our target
8336	       parameter is only a hint.  */
8337	    if (DECL_RTL_SET_P (slot))
8338	      {
8339		target = DECL_RTL (slot);
8340		/* If we have already expanded the slot, don't do
8341                   it again.  (mrs)  */
8342		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8343		  return target;
8344	      }
8345	    else
8346	      {
8347		SET_DECL_RTL (slot, target);
8348		/* If we must have an addressable slot, then make sure that
8349		   the RTL that we just stored in slot is OK.  */
8350		if (TREE_ADDRESSABLE (slot))
8351		  put_var_into_stack (slot);
8352	      }
8353	  }
8354
8355	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8356	/* Mark it as expanded.  */
8357	TREE_OPERAND (exp, 1) = NULL_TREE;
8358
8359	store_expr (exp1, target, 0);
8360
8361	expand_decl_cleanup (NULL_TREE, cleanups);
8362
8363	return target;
8364      }
8365
8366    case INIT_EXPR:
8367      {
8368	tree lhs = TREE_OPERAND (exp, 0);
8369	tree rhs = TREE_OPERAND (exp, 1);
8370
8371	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8372	return temp;
8373      }
8374
8375    case MODIFY_EXPR:
8376      {
8377	/* If lhs is complex, expand calls in rhs before computing it.
8378	   That's so we don't compute a pointer and save it over a
8379	   call.  If lhs is simple, compute it first so we can give it
8380	   as a target if the rhs is just a call.  This avoids an
8381	   extra temp and copy and that prevents a partial-subsumption
8382	   which makes bad code.  Actually we could treat
8383	   component_ref's of vars like vars.  */
8384
8385	tree lhs = TREE_OPERAND (exp, 0);
8386	tree rhs = TREE_OPERAND (exp, 1);
8387
8388	temp = 0;
8389
8390	/* Check for |= or &= of a bitfield of size one into another bitfield
8391	   of size 1.  In this case, (unless we need the result of the
8392	   assignment) we can do this more efficiently with a
8393	   test followed by an assignment, if necessary.
8394
8395	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8396	   things change so we do, this code should be enhanced to
8397	   support it.  */
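	/* For example, `x.a |= x.b' with one-bit fields is done as
	   `if (x.b) x.a = 1;', and `x.a &= x.b' as `if (! x.b) x.a = 0;'.  */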
8398	if (ignore
8399	    && TREE_CODE (lhs) == COMPONENT_REF
8400	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8401		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8402	    && TREE_OPERAND (rhs, 0) == lhs
8403	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8404	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8405	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8406	  {
8407	    rtx label = gen_label_rtx ();
8408
8409	    do_jump (TREE_OPERAND (rhs, 1),
8410		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8411		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8412	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8413					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8414					      ? integer_one_node
8415					      : integer_zero_node)),
8416			       0, 0);
8417	    do_pending_stack_adjust ();
8418	    emit_label (label);
8419	    return const0_rtx;
8420	  }
8421
8422	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8423
8424	return temp;
8425      }
8426
8427    case RETURN_EXPR:
8428      if (!TREE_OPERAND (exp, 0))
8429	expand_null_return ();
8430      else
8431	expand_return (TREE_OPERAND (exp, 0));
8432      return const0_rtx;
8433
8434    case PREINCREMENT_EXPR:
8435    case PREDECREMENT_EXPR:
8436      return expand_increment (exp, 0, ignore);
8437
8438    case POSTINCREMENT_EXPR:
8439    case POSTDECREMENT_EXPR:
8440      /* Faster to treat as pre-increment if result is not used.  */
8441      return expand_increment (exp, ! ignore, ignore);
8442
8443    case ADDR_EXPR:
8444      /* Are we taking the address of a nested function?  */
8445      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8446	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8447	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8448	  && ! TREE_STATIC (exp))
8449	{
8450	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8451	  op0 = force_operand (op0, target);
8452	}
8453      /* If we are taking the address of something erroneous, just
8454	 return a zero.  */
8455      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8456	return const0_rtx;
8457      /* If we are taking the address of a constant and are at the
8458	 top level, we have to use output_constant_def since we can't
8459	 call force_const_mem at top level.  */
8460      else if (cfun == 0
8461	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8462		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8463		       == 'c')))
8464	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8465      else
8466	{
8467	  /* We make sure to pass const0_rtx down if we came in with
8468	     ignore set, to avoid doing the cleanups twice for something.  */
8469	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8470			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8471			     (modifier == EXPAND_INITIALIZER
8472			      ? modifier : EXPAND_CONST_ADDRESS));
8473
8474	  /* If we are going to ignore the result, OP0 will have been set
8475	     to const0_rtx, so just return it.  Don't get confused and
8476	     think we are taking the address of the constant.  */
8477	  if (ignore)
8478	    return op0;
8479
8480	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8481	     clever and returns a REG when given a MEM.  */
8482	  op0 = protect_from_queue (op0, 1);
8483
8484	  /* We would like the object in memory.  If it is a constant, we can
8485	     have it be statically allocated into memory.  For a non-constant,
8486	     we need to allocate some memory and store the value into it.  */
8487
8488	  if (CONSTANT_P (op0))
8489	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8490				   op0);
8491	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8492		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8493		   || GET_CODE (op0) == PARALLEL)
8494	    {
8495	      /* If the operand is a SAVE_EXPR, we can deal with this by
8496		 forcing the SAVE_EXPR into memory.  */
8497	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8498		{
8499		  put_var_into_stack (TREE_OPERAND (exp, 0));
8500		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8501		}
8502	      else
8503		{
8504		  /* If this object is in a register, it can't be BLKmode.  */
8505		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8506		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
8507
8508		  if (GET_CODE (op0) == PARALLEL)
8509		    /* Handle calls that pass values in multiple
8510		       non-contiguous locations.  The Irix 6 ABI has examples
8511		       of this.  */
8512		    emit_group_store (memloc, op0,
8513				      int_size_in_bytes (inner_type));
8514		  else
8515		    emit_move_insn (memloc, op0);
8516
8517		  op0 = memloc;
8518		}
8519	    }
8520
8521	  if (GET_CODE (op0) != MEM)
8522	    abort ();
8523
8524	  mark_temp_addr_taken (op0);
8525	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8526	    {
8527	      op0 = XEXP (op0, 0);
8528#ifdef POINTERS_EXTEND_UNSIGNED
8529	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8530		  && mode == ptr_mode)
8531		op0 = convert_memory_address (ptr_mode, op0);
8532#endif
8533	      return op0;
8534	    }
8535
8536	  /* If OP0 is not aligned at least as much as the type requires, we
8537	     need to make a temporary, copy OP0 to it, and take the address of
8538	     the temporary.  We want to use the alignment of the type, not of
8539	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
8540	     the test for BLKmode means that can't happen.  The test for
8541	     BLKmode is because we never make mis-aligned MEMs with
8542	     non-BLKmode.
8543
8544	     We don't need to do this at all if the machine doesn't have
8545	     strict alignment.  */
8546	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8547	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8548		  > MEM_ALIGN (op0))
8549	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8550	    {
8551	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8552	      rtx new
8553		= assign_stack_temp_for_type
8554		  (TYPE_MODE (inner_type),
8555		   MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8556		   : int_size_in_bytes (inner_type),
8557		   1, build_qualified_type (inner_type,
8558					    (TYPE_QUALS (inner_type)
8559					     | TYPE_QUAL_CONST)));
8560
8561	      if (TYPE_ALIGN_OK (inner_type))
8562		abort ();
8563
8564	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8565	      op0 = new;
8566	    }
8567
8568	  op0 = force_operand (XEXP (op0, 0), target);
8569	}
8570
8571      if (flag_force_addr
8572	  && GET_CODE (op0) != REG
8573	  && modifier != EXPAND_CONST_ADDRESS
8574	  && modifier != EXPAND_INITIALIZER
8575	  && modifier != EXPAND_SUM)
8576	op0 = force_reg (Pmode, op0);
8577
8578      if (GET_CODE (op0) == REG
8579	  && ! REG_USERVAR_P (op0))
8580	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8581
8582#ifdef POINTERS_EXTEND_UNSIGNED
8583      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8584	  && mode == ptr_mode)
8585	op0 = convert_memory_address (ptr_mode, op0);
8586#endif
8587
8588      return op0;
8589
8590    case ENTRY_VALUE_EXPR:
8591      abort ();
8592
8593    /* COMPLEX type for Extended Pascal & Fortran  */
8594    case COMPLEX_EXPR:
8595      {
8596	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8597	rtx insns;
8598
8599	/* Get the rtx code of the operands.  */
8600	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8601	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8602
8603	if (! target)
8604	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8605
8606	start_sequence ();
8607
8608	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8609	emit_move_insn (gen_realpart (mode, target), op0);
8610	emit_move_insn (gen_imagpart (mode, target), op1);
8611
8612	insns = get_insns ();
8613	end_sequence ();
8614
8615	/* Complex construction should appear as a single unit.  */
8616	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8617	   each with a separate pseudo as destination.
8618	   It's not correct for flow to treat them as a unit.  */
8619	if (GET_CODE (target) != CONCAT)
8620	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8621	else
8622	  emit_insns (insns);
8623
8624	return target;
8625      }
8626
8627    case REALPART_EXPR:
8628      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8629      return gen_realpart (mode, op0);
8630
8631    case IMAGPART_EXPR:
8632      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8633      return gen_imagpart (mode, op0);
8634
8635    case CONJ_EXPR:
8636      {
8637	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8638	rtx imag_t;
8639	rtx insns;
8640
8641	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8642
8643	if (! target)
8644	  target = gen_reg_rtx (mode);
8645
8646	start_sequence ();
8647
8648	/* Store the realpart and the negated imagpart to target.  */
8649	emit_move_insn (gen_realpart (partmode, target),
8650			gen_realpart (partmode, op0));
8651
8652	imag_t = gen_imagpart (partmode, target);
8653	temp = expand_unop (partmode,
8654                            ! unsignedp && flag_trapv
8655                            && (GET_MODE_CLASS(partmode) == MODE_INT)
8656                            ? negv_optab : neg_optab,
8657			    gen_imagpart (partmode, op0), imag_t, 0);
8658	if (temp != imag_t)
8659	  emit_move_insn (imag_t, temp);
8660
8661	insns = get_insns ();
8662	end_sequence ();
8663
8664	/* Conjugate should appear as a single unit
8665	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8666	   each with a separate pseudo as destination.
8667	   It's not correct for flow to treat them as a unit.  */
8668	if (GET_CODE (target) != CONCAT)
8669	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8670	else
8671	  emit_insns (insns);
8672
8673	return target;
8674      }
8675
8676    case TRY_CATCH_EXPR:
8677      {
8678	tree handler = TREE_OPERAND (exp, 1);
8679
8680	expand_eh_region_start ();
8681
8682	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8683
8684	expand_eh_region_end_cleanup (handler);
8685
8686	return op0;
8687      }
8688
8689    case TRY_FINALLY_EXPR:
8690      {
8691	tree try_block = TREE_OPERAND (exp, 0);
8692	tree finally_block = TREE_OPERAND (exp, 1);
8693	rtx finally_label = gen_label_rtx ();
8694	rtx done_label = gen_label_rtx ();
8695	rtx return_link = gen_reg_rtx (Pmode);
8696	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8697			      (tree) finally_label, (tree) return_link);
8698	TREE_SIDE_EFFECTS (cleanup) = 1;
8699
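	/* The finally code is emitted only once, at FINALLY_LABEL, and is
	   entered like a subroutine: the cleanup registered below loads a
	   return address into RETURN_LINK and jumps to FINALLY_LABEL, and
	   the finally code jumps back through RETURN_LINK when done.  */
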
8700	/* Start a new binding layer that will keep track of all cleanup
8701	   actions to be performed.  */
8702	expand_start_bindings (2);
8703
8704	target_temp_slot_level = temp_slot_level;
8705
8706	expand_decl_cleanup (NULL_TREE, cleanup);
8707	op0 = expand_expr (try_block, target, tmode, modifier);
8708
8709	preserve_temp_slots (op0);
8710	expand_end_bindings (NULL_TREE, 0, 0);
8711	emit_jump (done_label);
8712	emit_label (finally_label);
8713	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8714	emit_indirect_jump (return_link);
8715	emit_label (done_label);
8716	return op0;
8717      }
8718
8719    case GOTO_SUBROUTINE_EXPR:
8720      {
8721	rtx subr = (rtx) TREE_OPERAND (exp, 0);
8722	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8723	rtx return_address = gen_label_rtx ();
8724	emit_move_insn (return_link,
8725			gen_rtx_LABEL_REF (Pmode, return_address));
8726	emit_jump (subr);
8727	emit_label (return_address);
8728	return const0_rtx;
8729      }
8730
8731    case VA_ARG_EXPR:
8732      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8733
8734    case EXC_PTR_EXPR:
8735      return get_exception_pointer (cfun);
8736
8737    case FDESC_EXPR:
8738      /* Function descriptors are not valid except as
8739	 initialization constants, and should not be expanded.  */
8740      abort ();
8741
8742    default:
8743      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8744    }
8745
8746  /* Here to do an ordinary binary operator, generating an instruction
8747     from the optab already placed in `this_optab'.  */
8748 binop:
8749  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8750    subtarget = 0;
8751  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8752  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8753 binop2:
8754  temp = expand_binop (mode, this_optab, op0, op1, target,
8755		       unsignedp, OPTAB_LIB_WIDEN);
8756  if (temp == 0)
8757    abort ();
8758  return temp;
8759}
8760
8761/* Return the tree node if ARG corresponds to a string constant, or zero
8762   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
8763   in bytes within the string that ARG is accessing.  The type of the
8764   offset will be `sizetype'.  */
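/* For example, when ARG is the address of a STRING_CST plus a constant 2,
   the STRING_CST is returned and *PTR_OFFSET is set to 2.  */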
8765
8766tree
8767string_constant (arg, ptr_offset)
8768     tree arg;
8769     tree *ptr_offset;
8770{
8771  STRIP_NOPS (arg);
8772
8773  if (TREE_CODE (arg) == ADDR_EXPR
8774      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8775    {
8776      *ptr_offset = size_zero_node;
8777      return TREE_OPERAND (arg, 0);
8778    }
8779  else if (TREE_CODE (arg) == PLUS_EXPR)
8780    {
8781      tree arg0 = TREE_OPERAND (arg, 0);
8782      tree arg1 = TREE_OPERAND (arg, 1);
8783
8784      STRIP_NOPS (arg0);
8785      STRIP_NOPS (arg1);
8786
8787      if (TREE_CODE (arg0) == ADDR_EXPR
8788	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8789	{
8790	  *ptr_offset = convert (sizetype, arg1);
8791	  return TREE_OPERAND (arg0, 0);
8792	}
8793      else if (TREE_CODE (arg1) == ADDR_EXPR
8794	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8795	{
8796	  *ptr_offset = convert (sizetype, arg0);
8797	  return TREE_OPERAND (arg1, 0);
8798	}
8799    }
8800
8801  return 0;
8802}
8803
8804/* Expand code for a post- or pre- increment or decrement
8805   and return the RTX for the result.
8806   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
8807
8808static rtx
8809expand_increment (exp, post, ignore)
8810     tree exp;
8811     int post, ignore;
8812{
8813  rtx op0, op1;
8814  rtx temp, value;
8815  tree incremented = TREE_OPERAND (exp, 0);
8816  optab this_optab = add_optab;
8817  int icode;
8818  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8819  int op0_is_copy = 0;
8820  int single_insn = 0;
8821  /* 1 means we can't store into OP0 directly,
8822     because it is a subreg narrower than a word,
8823     and we don't dare clobber the rest of the word.  */
8824  int bad_subreg = 0;
8825
8826  /* Stabilize any component ref that might need to be
8827     evaluated more than once below.  */
8828  if (!post
8829      || TREE_CODE (incremented) == BIT_FIELD_REF
8830      || (TREE_CODE (incremented) == COMPONENT_REF
8831	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8832	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8833    incremented = stabilize_reference (incremented);
8834  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
8835     ones into save exprs so that they don't accidentally get evaluated
8836     more than once by the code below.  */
8837  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8838      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8839    incremented = save_expr (incremented);
8840
8841  /* Compute the operands as RTX.
8842     Note whether OP0 is the actual lvalue or a copy of it:
8843     I believe it is a copy iff it is a register or subreg
8844     and insns were generated in computing it.  */
8845
8846  temp = get_last_insn ();
8847  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8848
8849  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8850     in place but instead must do sign- or zero-extension during assignment,
8851     so we copy it into a new register and let the code below use it as
8852     a copy.
8853
8854     Note that we can safely modify this SUBREG since it is known not to be
8855     shared (it was made by the expand_expr call above).  */
8856
8857  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8858    {
8859      if (post)
8860	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8861      else
8862	bad_subreg = 1;
8863    }
8864  else if (GET_CODE (op0) == SUBREG
8865	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8866    {
8867      /* We cannot increment this SUBREG in place.  If we are
8868	 post-incrementing, get a copy of the old value.  Otherwise,
8869	 just mark that we cannot increment in place.  */
8870      if (post)
8871	op0 = copy_to_reg (op0);
8872      else
8873	bad_subreg = 1;
8874    }
8875
8876  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8877		 && temp != get_last_insn ());
8878  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8879
8880  /* Decide whether incrementing or decrementing.  */
8881  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8882      || TREE_CODE (exp) == PREDECREMENT_EXPR)
8883    this_optab = sub_optab;
8884
8885  /* Convert decrement by a constant into a negative increment.  */
8886  if (this_optab == sub_optab
8887      && GET_CODE (op1) == CONST_INT)
8888    {
8889      op1 = GEN_INT (-INTVAL (op1));
8890      this_optab = add_optab;
8891    }
8892
8893  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8894    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8895
8896  /* For a preincrement, see if we can do this with a single instruction.  */
8897  if (!post)
8898    {
8899      icode = (int) this_optab->handlers[(int) mode].insn_code;
8900      if (icode != (int) CODE_FOR_nothing
8901	  /* Make sure that OP0 is valid for operands 0 and 1
8902	     of the insn we want to queue.  */
8903	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
8904	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
8905	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
8906	single_insn = 1;
8907    }
8908
8909  /* If OP0 is not the actual lvalue, but rather a copy in a register,
8910     then we cannot just increment OP0.  We must therefore contrive to
8911     increment the original value.  Then, for postincrement, we can return
8912     OP0 since it is a copy of the old value.  For preincrement, expand here
8913     unless we can do it with a single insn.
8914
8915     Likewise if storing directly into OP0 would clobber high bits
8916     we need to preserve (bad_subreg).  */
8917  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8918    {
8919      /* This is the easiest way to increment the value wherever it is.
8920	 Problems with multiple evaluation of INCREMENTED are prevented
8921	 because either (1) it is a component_ref or preincrement,
8922	 in which case it was stabilized above, or (2) it is an array_ref
8923	 with constant index in an array in a register, which is
8924	 safe to reevaluate.  */
8925      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8926			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
8927			    ? MINUS_EXPR : PLUS_EXPR),
8928			   TREE_TYPE (exp),
8929			   incremented,
8930			   TREE_OPERAND (exp, 1));
8931
8932      while (TREE_CODE (incremented) == NOP_EXPR
8933	     || TREE_CODE (incremented) == CONVERT_EXPR)
8934	{
8935	  newexp = convert (TREE_TYPE (incremented), newexp);
8936	  incremented = TREE_OPERAND (incremented, 0);
8937	}
8938
8939      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8940      return post ? op0 : temp;
8941    }
8942
8943  if (post)
8944    {
8945      /* We have a true reference to the value in OP0.
8946	 If there is an insn to add or subtract in this mode, queue it.
8947	 Queueing the increment insn avoids the register shuffling
8948	 that often results if we must increment now and first save
8949	 the old value for subsequent use.  */
8950
8951#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
8952      op0 = stabilize (op0);
8953#endif
8954
8955      icode = (int) this_optab->handlers[(int) mode].insn_code;
8956      if (icode != (int) CODE_FOR_nothing
8957	  /* Make sure that OP0 is valid for operands 0 and 1
8958	     of the insn we want to queue.  */
8959	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
8960	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
8961	{
8962	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8963	    op1 = force_reg (mode, op1);
8964
8965	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8966	}
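      /* If OP0 itself is not acceptable to the insn but is a MEM, increment
	 a register copy of its value and queue a store of the copy back
	 into OP0.  */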
8967      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8968	{
8969	  rtx addr = (general_operand (XEXP (op0, 0), mode)
8970		      ? force_reg (Pmode, XEXP (op0, 0))
8971		      : copy_to_reg (XEXP (op0, 0)));
8972	  rtx temp, result;
8973
8974	  op0 = replace_equiv_address (op0, addr);
8975	  temp = force_reg (GET_MODE (op0), op0);
8976	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8977	    op1 = force_reg (mode, op1);
8978
8979	  /* The increment queue is LIFO, thus we have to `queue'
8980	     the instructions in reverse order.  */
8981	  enqueue_insn (op0, gen_move_insn (op0, temp));
8982	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8983	  return result;
8984	}
8985    }
8986
8987  /* Preincrement, or we can't increment with one simple insn.  */
8988  if (post)
8989    /* Save a copy of the value before inc or dec, to return it later.  */
8990    temp = value = copy_to_reg (op0);
8991  else
8992    /* Arrange to return the incremented value.  */
8993    /* Copy the rtx because expand_binop will protect from the queue,
8994       and the results of that would be invalid for us to return
8995       if our caller does emit_queue before using our result.  */
8996    temp = copy_rtx (value = op0);
8997
8998  /* Increment however we can.  */
8999  op1 = expand_binop (mode, this_optab, value, op1, op0,
9000		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9001
9002  /* Make sure the value is stored into OP0.  */
9003  if (op1 != op0)
9004    emit_move_insn (op0, op1);
9005
9006  return temp;
9007}
9008
9009/* At the start of a function, record that we have no previously-pushed
9010   arguments waiting to be popped.  */
9011
9012void
9013init_pending_stack_adjust ()
9014{
9015  pending_stack_adjust = 0;
9016}
9017
9018/* When exiting from function, if safe, clear out any pending stack adjust
9019   so the adjustment won't get done.
9020
9021   Note, if the current function calls alloca, then it must have a
9022   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9023
9024void
9025clear_pending_stack_adjust ()
9026{
9027#ifdef EXIT_IGNORE_STACK
9028  if (optimize > 0
9029      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9030      && EXIT_IGNORE_STACK
9031      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9032      && ! flag_inline_functions)
9033    {
9034      stack_pointer_delta -= pending_stack_adjust;
9035      pending_stack_adjust = 0;
9036    }
9037#endif
9038}
9039
9040/* Pop any previously-pushed arguments that have not been popped yet.  */
9041
9042void
9043do_pending_stack_adjust ()
9044{
9045  if (inhibit_defer_pop == 0)
9046    {
9047      if (pending_stack_adjust != 0)
9048	adjust_stack (GEN_INT (pending_stack_adjust));
9049      pending_stack_adjust = 0;
9050    }
9051}
9052
9053/* Expand conditional expressions.  */
9054
9055/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9056   LABEL is an rtx of code CODE_LABEL, in this function and all the
9057   functions here.  */
9058
9059void
9060jumpifnot (exp, label)
9061     tree exp;
9062     rtx label;
9063{
9064  do_jump (exp, label, NULL_RTX);
9065}
9066
9067/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9068
9069void
9070jumpif (exp, label)
9071     tree exp;
9072     rtx label;
9073{
9074  do_jump (exp, NULL_RTX, label);
9075}
9076
9077/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9078   the result is zero, or IF_TRUE_LABEL if the result is one.
9079   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9080   meaning fall through in that case.
9081
9082   do_jump always does any pending stack adjust except when it does not
9083   actually perform a jump.  An example where there is no jump
9084   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9085
9086   This function is responsible for optimizing cases such as
9087   &&, || and comparison operators in EXP.  */
9088
9089void
9090do_jump (exp, if_false_label, if_true_label)
9091     tree exp;
9092     rtx if_false_label, if_true_label;
9093{
9094  enum tree_code code = TREE_CODE (exp);
9095  /* Some cases need to create a label to jump to
9096     in order to properly fall through.
9097     These cases set DROP_THROUGH_LABEL nonzero.  */
9098  rtx drop_through_label = 0;
9099  rtx temp;
9100  int i;
9101  tree type;
9102  enum machine_mode mode;
9103
9104#ifdef MAX_INTEGER_COMPUTATION_MODE
9105  check_max_integer_computation_mode (exp);
9106#endif
9107
9108  emit_queue ();
9109
9110  switch (code)
9111    {
9112    case ERROR_MARK:
9113      break;
9114
9115    case INTEGER_CST:
9116      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9117      if (temp)
9118	emit_jump (temp);
9119      break;
9120
9121#if 0
9122      /* This is not true with #pragma weak  */
9123    case ADDR_EXPR:
9124      /* The address of something can never be zero.  */
9125      if (if_true_label)
9126	emit_jump (if_true_label);
9127      break;
9128#endif
9129
9130    case NOP_EXPR:
9131      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9132	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9133	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9134	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9135	goto normal;
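      /* Fall through.  */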
9136    case CONVERT_EXPR:
9137      /* If we are narrowing the operand, we have to do the compare in the
9138	 narrower mode.  */
9139      if ((TYPE_PRECISION (TREE_TYPE (exp))
9140	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9141	goto normal;
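      /* Fall through.  */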
9142    case NON_LVALUE_EXPR:
9143    case REFERENCE_EXPR:
9144    case ABS_EXPR:
9145    case NEGATE_EXPR:
9146    case LROTATE_EXPR:
9147    case RROTATE_EXPR:
9148      /* These cannot change zero->non-zero or vice versa.  */
9149      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9150      break;
9151
9152    case WITH_RECORD_EXPR:
9153      /* Put the object on the placeholder list, recurse through our first
9154	 operand, and pop the list.  */
9155      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9156				    placeholder_list);
9157      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9158      placeholder_list = TREE_CHAIN (placeholder_list);
9159      break;
9160
9161#if 0
9162      /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9163	 a test and can be longer if the test is eliminated.  */
9164    case PLUS_EXPR:
9165      /* Reduce to minus.  */
9166      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9167		   TREE_OPERAND (exp, 0),
9168		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9169				 TREE_OPERAND (exp, 1))));
9170      /* Process as MINUS.  */
9171#endif
9172
9173    case MINUS_EXPR:
9174      /* Non-zero iff operands of minus differ.  */
9175      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9176				  TREE_OPERAND (exp, 0),
9177				  TREE_OPERAND (exp, 1)),
9178			   NE, NE, if_false_label, if_true_label);
9179      break;
9180
9181    case BIT_AND_EXPR:
9182      /* If we are AND'ing with a small constant, do this comparison in the
9183	 smallest type that fits.  If the machine doesn't have comparisons
9184	 that small, it will be converted back to the wider comparison.
9185	 This helps if we are testing the sign bit of a narrower object.
9186	 combine can't do this for us because it can't know whether a
9187	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
9188
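      /* For example, a test of `x & 0x80' on a wider X can be narrowed to
	 an 8-bit comparison when the target supports one.  */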
9189      if (! SLOW_BYTE_ACCESS
9190	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9191	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9192	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9193	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9194	  && (type = type_for_mode (mode, 1)) != 0
9195	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9196	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9197	      != CODE_FOR_nothing))
9198	{
9199	  do_jump (convert (type, exp), if_false_label, if_true_label);
9200	  break;
9201	}
9202      goto normal;
9203
9204    case TRUTH_NOT_EXPR:
9205      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9206      break;
9207
9208    case TRUTH_ANDIF_EXPR:
9209      if (if_false_label == 0)
9210	if_false_label = drop_through_label = gen_label_rtx ();
9211      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9212      start_cleanup_deferral ();
9213      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9214      end_cleanup_deferral ();
9215      break;
9216
9217    case TRUTH_ORIF_EXPR:
9218      if (if_true_label == 0)
9219	if_true_label = drop_through_label = gen_label_rtx ();
9220      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9221      start_cleanup_deferral ();
9222      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9223      end_cleanup_deferral ();
9224      break;
9225
9226    case COMPOUND_EXPR:
9227      push_temp_slots ();
9228      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9229      preserve_temp_slots (NULL_RTX);
9230      free_temp_slots ();
9231      pop_temp_slots ();
9232      emit_queue ();
9233      do_pending_stack_adjust ();
9234      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9235      break;
9236
9237    case COMPONENT_REF:
9238    case BIT_FIELD_REF:
9239    case ARRAY_REF:
9240    case ARRAY_RANGE_REF:
9241      {
9242	HOST_WIDE_INT bitsize, bitpos;
9243	int unsignedp;
9244	enum machine_mode mode;
9245	tree type;
9246	tree offset;
9247	int volatilep = 0;
9248
9249	/* Get description of this reference.  We don't actually care
9250	   about the underlying object here.  */
9251	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9252			     &unsignedp, &volatilep);
9253
9254	type = type_for_size (bitsize, unsignedp);
9255	if (! SLOW_BYTE_ACCESS
9256	    && type != 0 && bitsize >= 0
9257	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9258	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9259		!= CODE_FOR_nothing))
9260	  {
9261	    do_jump (convert (type, exp), if_false_label, if_true_label);
9262	    break;
9263	  }
9264	goto normal;
9265      }
9266
9267    case COND_EXPR:
9268      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9269      if (integer_onep (TREE_OPERAND (exp, 1))
9270	  && integer_zerop (TREE_OPERAND (exp, 2)))
9271	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9272
9273      else if (integer_zerop (TREE_OPERAND (exp, 1))
9274	       && integer_onep (TREE_OPERAND (exp, 2)))
9275	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9276
9277      else
9278	{
9279	  rtx label1 = gen_label_rtx ();
9280	  drop_through_label = gen_label_rtx ();
9281
9282	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9283
9284	  start_cleanup_deferral ();
9285	  /* Now the THEN-expression.  */
9286	  do_jump (TREE_OPERAND (exp, 1),
9287		   if_false_label ? if_false_label : drop_through_label,
9288		   if_true_label ? if_true_label : drop_through_label);
9289	  /* In case the do_jump just above never jumps.  */
9290	  do_pending_stack_adjust ();
9291	  emit_label (label1);
9292
9293	  /* Now the ELSE-expression.  */
9294	  do_jump (TREE_OPERAND (exp, 2),
9295		   if_false_label ? if_false_label : drop_through_label,
9296		   if_true_label ? if_true_label : drop_through_label);
9297	  end_cleanup_deferral ();
9298	}
9299      break;
9300
9301    case EQ_EXPR:
9302      {
9303	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9304
9305	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9306	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9307	  {
9308	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9309	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9310	    do_jump
9311	      (fold
9312	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9313		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9314				    fold (build1 (REALPART_EXPR,
9315						  TREE_TYPE (inner_type),
9316						  exp0)),
9317				    fold (build1 (REALPART_EXPR,
9318						  TREE_TYPE (inner_type),
9319						  exp1)))),
9320		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9321				    fold (build1 (IMAGPART_EXPR,
9322						  TREE_TYPE (inner_type),
9323						  exp0)),
9324				    fold (build1 (IMAGPART_EXPR,
9325						  TREE_TYPE (inner_type),
9326						  exp1)))))),
9327	       if_false_label, if_true_label);
9328	  }
9329
9330	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9331	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9332
9333	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9334		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9335	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9336	else
9337	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9338	break;
9339      }
9340
9341    case NE_EXPR:
9342      {
9343	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9344
9345	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9346	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9347	  {
9348	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9349	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9350	    do_jump
9351	      (fold
9352	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9353		       fold (build (NE_EXPR, TREE_TYPE (exp),
9354				    fold (build1 (REALPART_EXPR,
9355						  TREE_TYPE (inner_type),
9356						  exp0)),
9357				    fold (build1 (REALPART_EXPR,
9358						  TREE_TYPE (inner_type),
9359						  exp1)))),
9360		       fold (build (NE_EXPR, TREE_TYPE (exp),
9361				    fold (build1 (IMAGPART_EXPR,
9362						  TREE_TYPE (inner_type),
9363						  exp0)),
9364				    fold (build1 (IMAGPART_EXPR,
9365						  TREE_TYPE (inner_type),
9366						  exp1)))))),
9367	       if_false_label, if_true_label);
9368	  }
9369
9370	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9371	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9372
9373	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9374		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9375	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9376	else
9377	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9378	break;
9379      }
9380
9381    case LT_EXPR:
9382      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9383      if (GET_MODE_CLASS (mode) == MODE_INT
9384	  && ! can_compare_p (LT, mode, ccp_jump))
9385	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9386      else
9387	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9388      break;
9389
9390    case LE_EXPR:
9391      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9392      if (GET_MODE_CLASS (mode) == MODE_INT
9393	  && ! can_compare_p (LE, mode, ccp_jump))
9394	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9395      else
9396	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9397      break;
9398
9399    case GT_EXPR:
9400      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9401      if (GET_MODE_CLASS (mode) == MODE_INT
9402	  && ! can_compare_p (GT, mode, ccp_jump))
9403	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9404      else
9405	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9406      break;
9407
9408    case GE_EXPR:
9409      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9410      if (GET_MODE_CLASS (mode) == MODE_INT
9411	  && ! can_compare_p (GE, mode, ccp_jump))
9412	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9413      else
9414	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9415      break;
9416
9417    case UNORDERED_EXPR:
9418    case ORDERED_EXPR:
9419      {
9420	enum rtx_code cmp, rcmp;
9421	int do_rev;
9422
9423	if (code == UNORDERED_EXPR)
9424	  cmp = UNORDERED, rcmp = ORDERED;
9425	else
9426	  cmp = ORDERED, rcmp = UNORDERED;
9427	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9428
9429	do_rev = 0;
9430	if (! can_compare_p (cmp, mode, ccp_jump)
9431	    && (can_compare_p (rcmp, mode, ccp_jump)
9432		/* If the target doesn't provide either UNORDERED or ORDERED
9433		   comparisons, canonicalize on UNORDERED for the library.  */
9434		|| rcmp == UNORDERED))
9435	  do_rev = 1;
9436
9437        if (! do_rev)
9438	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9439	else
9440	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9441      }
9442      break;
9443
9444    {
9445      enum rtx_code rcode1;
9446      enum tree_code tcode2;
9447
9448      case UNLT_EXPR:
9449	rcode1 = UNLT;
9450	tcode2 = LT_EXPR;
9451	goto unordered_bcc;
9452      case UNLE_EXPR:
9453	rcode1 = UNLE;
9454	tcode2 = LE_EXPR;
9455	goto unordered_bcc;
9456      case UNGT_EXPR:
9457	rcode1 = UNGT;
9458	tcode2 = GT_EXPR;
9459	goto unordered_bcc;
9460      case UNGE_EXPR:
9461	rcode1 = UNGE;
9462	tcode2 = GE_EXPR;
9463	goto unordered_bcc;
9464      case UNEQ_EXPR:
9465	rcode1 = UNEQ;
9466	tcode2 = EQ_EXPR;
9467	goto unordered_bcc;
9468
9469      unordered_bcc:
9470        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9471	if (can_compare_p (rcode1, mode, ccp_jump))
9472	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9473			       if_true_label);
9474	else
9475	  {
9476	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
9477	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
9478	    tree cmp0, cmp1;
9479
9480	    /* If the target doesn't support combined unordered
9481	       compares, decompose into UNORDERED + comparison.  */
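	    /* For example, UNLT (a, b) is done as UNORDERED (a, b) || a < b.  */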
9482	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9483	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9484	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9485	    do_jump (exp, if_false_label, if_true_label);
9486	  }
9487      }
9488      break;
9489
9490      /* Special case:
9491		__builtin_expect (<test>, 0)	and
9492		__builtin_expect (<test>, 1)
9493
9494	 We need to do this here, so that <test> is not converted to a SCC
9495	 operation on machines that use condition code registers and COMPARE
9496	 like the PowerPC, and then the jump is done based on whether the SCC
9497	 operation produced a 1 or 0.  */
9498    case CALL_EXPR:
9499      /* Check for a built-in function.  */
9500      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9501	{
9502	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9503	  tree arglist = TREE_OPERAND (exp, 1);
9504
9505	  if (TREE_CODE (fndecl) == FUNCTION_DECL
9506	      && DECL_BUILT_IN (fndecl)
9507	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9508	      && arglist != NULL_TREE
9509	      && TREE_CHAIN (arglist) != NULL_TREE)
9510	    {
9511	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9512						    if_true_label);
9513
9514	      if (seq != NULL_RTX)
9515		{
9516		  emit_insn (seq);
9517		  return;
9518		}
9519	    }
9520	}
9521      /* fall through and generate the normal code.  */
9522
9523    default:
9524    normal:
9525      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9526#if 0
9527      /* This is not needed any more and causes poor code since it causes
9528	 comparisons and tests from non-SI objects to have different code
9529	 sequences.  */
9530      /* Copy to register to avoid generating bad insns by cse
9531	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
9532      if (!cse_not_expected && GET_CODE (temp) == MEM)
9533	temp = copy_to_reg (temp);
9534#endif
9535      do_pending_stack_adjust ();
9536      /* Do any postincrements in the expression that was tested.  */
9537      emit_queue ();
9538
9539      if (GET_CODE (temp) == CONST_INT
9540	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9541	  || GET_CODE (temp) == LABEL_REF)
9542	{
9543	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9544	  if (target)
9545	    emit_jump (target);
9546	}
9547      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9548	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9549	/* Note swapping the labels gives us not-equal.  */
9550	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9551      else if (GET_MODE (temp) != VOIDmode)
9552	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9553				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9554				 GET_MODE (temp), NULL_RTX,
9555				 if_false_label, if_true_label);
9556      else
9557	abort ();
9558    }
9559
9560  if (drop_through_label)
9561    {
9562      /* If do_jump produces code that might be jumped around,
9563	 do any stack adjusts from that code, before the place
9564	 where control merges in.  */
9565      do_pending_stack_adjust ();
9566      emit_label (drop_through_label);
9567    }
9568}
9569
9570/* Given a comparison expression EXP for values too wide to be compared
9571   with one insn, test the comparison and jump to the appropriate label.
9572   The code of EXP is ignored; we always test GT if SWAP is 0,
9573   and LT if SWAP is 1.  */
9574
9575static void
9576do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9577     tree exp;
9578     int swap;
9579     rtx if_false_label, if_true_label;
9580{
9581  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9582  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9583  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9584  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9585
9586  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9587}
9588
9589/* Compare OP0 with OP1, word at a time, in mode MODE.
9590   UNSIGNEDP says to do unsigned comparison.
9591   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
9592
9593void
9594do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9595     enum machine_mode mode;
9596     int unsignedp;
9597     rtx op0, op1;
9598     rtx if_false_label, if_true_label;
9599{
9600  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9601  rtx drop_through_label = 0;
9602  int i;
9603
9604  if (! if_true_label || ! if_false_label)
9605    drop_through_label = gen_label_rtx ();
9606  if (! if_true_label)
9607    if_true_label = drop_through_label;
9608  if (! if_false_label)
9609    if_false_label = drop_through_label;
9610
9611  /* Compare a word at a time, high order first.  */
9612  for (i = 0; i < nwords; i++)
9613    {
9614      rtx op0_word, op1_word;
9615
9616      if (WORDS_BIG_ENDIAN)
9617	{
9618	  op0_word = operand_subword_force (op0, i, mode);
9619	  op1_word = operand_subword_force (op1, i, mode);
9620	}
9621      else
9622	{
9623	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9624	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9625	}
9626
9627      /* All but high-order word must be compared as unsigned.  */
9628      do_compare_rtx_and_jump (op0_word, op1_word, GT,
9629			       (unsignedp || i > 0), word_mode, NULL_RTX,
9630			       NULL_RTX, if_true_label);
9631
9632      /* Consider lower words only if these are equal.  */
9633      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9634			       NULL_RTX, NULL_RTX, if_false_label);
9635    }
9636
9637  if (if_false_label)
9638    emit_jump (if_false_label);
9639  if (drop_through_label)
9640    emit_label (drop_through_label);
9641}
9642
9643/* Given an EQ_EXPR expression EXP for values too wide to be compared
9644   with one insn, test the comparison and jump to the appropriate label.  */
9645
9646static void
9647do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9648     tree exp;
9649     rtx if_false_label, if_true_label;
9650{
9651  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9652  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9653  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9654  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9655  int i;
9656  rtx drop_through_label = 0;
9657
9658  if (! if_false_label)
9659    drop_through_label = if_false_label = gen_label_rtx ();
9660
9661  for (i = 0; i < nwords; i++)
9662    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9663			     operand_subword_force (op1, i, mode),
9664			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9665			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
9666
9667  if (if_true_label)
9668    emit_jump (if_true_label);
9669  if (drop_through_label)
9670    emit_label (drop_through_label);
9671}
9672
9673/* Jump according to whether OP0 is 0.
9674   We assume that OP0 has an integer mode that is too wide
9675   for the available compare insns.  */
9676
9677void
9678do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9679     rtx op0;
9680     rtx if_false_label, if_true_label;
9681{
9682  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9683  rtx part;
9684  int i;
9685  rtx drop_through_label = 0;
9686
9687  /* The fastest way of doing this comparison on almost any machine is to
9688     "or" all the words and compare the result.  If all have to be loaded
9689     from memory and this is a very wide item, it's possible this may
9690     be slower, but that's highly unlikely.  */
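
  /* For instance (a sketch, assuming a four-word OP0 with illustrative
     names w0..w3), the effect is

	part = w0 | w1 | w2 | w3;
	if (part == 0) goto if_true_label; else goto if_false_label;

     falling back to the per-word compares below only if the IOR cannot
     be expanded.  */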
9691
9692  part = gen_reg_rtx (word_mode);
9693  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9694  for (i = 1; i < nwords && part != 0; i++)
9695    part = expand_binop (word_mode, ior_optab, part,
9696			 operand_subword_force (op0, i, GET_MODE (op0)),
9697			 part, 1, OPTAB_WIDEN);
9698
9699  if (part != 0)
9700    {
9701      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9702			       NULL_RTX, if_false_label, if_true_label);
9703
9704      return;
9705    }
9706
9707  /* If we couldn't do the "or" simply, do this with a series of compares.  */
9708  if (! if_false_label)
9709    drop_through_label = if_false_label = gen_label_rtx ();
9710
9711  for (i = 0; i < nwords; i++)
9712    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9713			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
9714			     if_false_label, NULL_RTX);
9715
9716  if (if_true_label)
9717    emit_jump (if_true_label);
9718
9719  if (drop_through_label)
9720    emit_label (drop_through_label);
9721}
9722
9723/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9724   (OP0 and OP1 are rtx values that have already been computed)
9725   and set (CC0) according to the result.
9726   The decision as to signed or unsigned comparison must be made by the caller.
9727
9728   We force a stack adjustment unless there are currently
9729   things pushed on the stack that aren't yet used.
9730
9731   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9732   compared.  */
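
/* For example (a sketch of the cc0 convention; the details of the emitted
   comparison vary by target), for CODE == GT this emits the comparison
   insn and returns

	(gt (cc0) (const_int 0))

   which the caller can then use as the condition of a jump insn.  */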
9733
9734rtx
9735compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9736     rtx op0, op1;
9737     enum rtx_code code;
9738     int unsignedp;
9739     enum machine_mode mode;
9740     rtx size;
9741{
9742  rtx tem;
9743
9744  /* If one operand is constant, make it the second one.  Only do this
9745     if the other operand is not constant as well.  */
9746
9747  if (swap_commutative_operands_p (op0, op1))
9748    {
9749      tem = op0;
9750      op0 = op1;
9751      op1 = tem;
9752      code = swap_condition (code);
9753    }
9754
9755  if (flag_force_mem)
9756    {
9757      op0 = force_not_mem (op0);
9758      op1 = force_not_mem (op1);
9759    }
9760
9761  do_pending_stack_adjust ();
9762
9763  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9764      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9765    return tem;
9766
9767#if 0
9768  /* There's no need to do this now that combine.c can eliminate lots of
9769     sign extensions.  This can be less efficient in certain cases on other
9770     machines.  */
9771
9772  /* If this is a signed equality comparison, we can do it as an
9773     unsigned comparison since zero-extension is cheaper than sign
9774     extension and comparisons with zero are done as unsigned.  This is
9775     the case even on machines that can do fast sign extension, since
9776     zero-extension is easier to combine with other operations than
9777     sign-extension is.  If we are comparing against a constant, we must
9778     convert it to what it would look like unsigned.  */
9779  if ((code == EQ || code == NE) && ! unsignedp
9780      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9781    {
9782      if (GET_CODE (op1) == CONST_INT
9783	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9784	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9785      unsignedp = 1;
9786    }
9787#endif
9788
9789  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9790
9791  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9792}
9793
9794/* Like do_compare_and_jump but expects the values to compare as two rtx's.
9795   The decision as to signed or unsigned comparison must be made by the caller.
9796
9797   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9798   compared.  */
9799
9800void
9801do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9802			 if_false_label, if_true_label)
9803     rtx op0, op1;
9804     enum rtx_code code;
9805     int unsignedp;
9806     enum machine_mode mode;
9807     rtx size;
9808     rtx if_false_label, if_true_label;
9809{
9810  rtx tem;
9811  int dummy_true_label = 0;
9812
9813  /* Reverse the comparison if that is safe and we want to jump if it is
9814     false.  */
9815  if (! if_true_label && ! FLOAT_MODE_P (mode))
9816    {
9817      if_true_label = if_false_label;
9818      if_false_label = 0;
9819      code = reverse_condition (code);
9820    }
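
  /* For example, "jump to LABEL unless a < b" becomes "jump to LABEL if
     a >= b" (a sketch).  This is not done for floating-point modes, where
     reversing the condition is unsafe in the presence of NaNs.  */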
9821
9822  /* If one operand is constant, make it the second one.  Only do this
9823     if the other operand is not constant as well.  */
9824
9825  if (swap_commutative_operands_p (op0, op1))
9826    {
9827      tem = op0;
9828      op0 = op1;
9829      op1 = tem;
9830      code = swap_condition (code);
9831    }
9832
9833  if (flag_force_mem)
9834    {
9835      op0 = force_not_mem (op0);
9836      op1 = force_not_mem (op1);
9837    }
9838
9839  do_pending_stack_adjust ();
9840
9841  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9842      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9843    {
9844      if (tem == const_true_rtx)
9845	{
9846	  if (if_true_label)
9847	    emit_jump (if_true_label);
9848	}
9849      else
9850	{
9851	  if (if_false_label)
9852	    emit_jump (if_false_label);
9853	}
9854      return;
9855    }
9856
9857#if 0
9858  /* There's no need to do this now that combine.c can eliminate lots of
9859     sign extensions.  This can be less efficient in certain cases on other
9860     machines.  */
9861
9862  /* If this is a signed equality comparison, we can do it as an
9863     unsigned comparison since zero-extension is cheaper than sign
9864     extension and comparisons with zero are done as unsigned.  This is
9865     the case even on machines that can do fast sign extension, since
9866     zero-extension is easier to combine with other operations than
9867     sign-extension is.  If we are comparing against a constant, we must
9868     convert it to what it would look like unsigned.  */
9869  if ((code == EQ || code == NE) && ! unsignedp
9870      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9871    {
9872      if (GET_CODE (op1) == CONST_INT
9873	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9874	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9875      unsignedp = 1;
9876    }
9877#endif
9878
9879  if (! if_true_label)
9880    {
9881      dummy_true_label = 1;
9882      if_true_label = gen_label_rtx ();
9883    }
9884
9885  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9886			   if_true_label);
9887
9888  if (if_false_label)
9889    emit_jump (if_false_label);
9890  if (dummy_true_label)
9891    emit_label (if_true_label);
9892}
9893
9894/* Generate code for a comparison expression EXP (including code to compute
9895   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9896   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
9897   generated code will drop through.
9898   SIGNED_CODE should be the rtx operation for this comparison for
9899   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9900
9901   We force a stack adjustment unless there are currently
9902   things pushed on the stack that aren't yet used.  */
9903
9904static void
9905do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9906		     if_true_label)
9907     tree exp;
9908     enum rtx_code signed_code, unsigned_code;
9909     rtx if_false_label, if_true_label;
9910{
9911  rtx op0, op1;
9912  tree type;
9913  enum machine_mode mode;
9914  int unsignedp;
9915  enum rtx_code code;
9916
9917  /* Don't crash if the comparison was erroneous.  */
9918  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9919  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9920    return;
9921
9922  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9923  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9924    return;
9925
9926  type = TREE_TYPE (TREE_OPERAND (exp, 0));
9927  mode = TYPE_MODE (type);
9928  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9929      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9930	  || (GET_MODE_BITSIZE (mode)
9931	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9932								      1)))))))
9933    {
9934      /* op0 might have been replaced by a promoted constant, in which
9935	 case the type of the second argument should be used.  */
9936      type = TREE_TYPE (TREE_OPERAND (exp, 1));
9937      mode = TYPE_MODE (type);
9938    }
9939  unsignedp = TREE_UNSIGNED (type);
9940  code = unsignedp ? unsigned_code : signed_code;
9941
9942#ifdef HAVE_canonicalize_funcptr_for_compare
9943  /* If function pointers need to be "canonicalized" before they can
9944     be reliably compared, then canonicalize them.  */
9945  if (HAVE_canonicalize_funcptr_for_compare
9946      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9947      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9948	  == FUNCTION_TYPE))
9949    {
9950      rtx new_op0 = gen_reg_rtx (mode);
9951
9952      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9953      op0 = new_op0;
9954    }
9955
9956  if (HAVE_canonicalize_funcptr_for_compare
9957      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9958      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9959	  == FUNCTION_TYPE))
9960    {
9961      rtx new_op1 = gen_reg_rtx (mode);
9962
9963      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9964      op1 = new_op1;
9965    }
9966#endif
9967
9968  /* Do any postincrements in the expression that was tested.  */
9969  emit_queue ();
9970
9971  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9972			   ((mode == BLKmode)
9973			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9974			   if_false_label, if_true_label);
9975}
9976
9977/* Generate code to calculate EXP using a store-flag instruction
9978   and return an rtx for the result.  EXP is either a comparison
9979   or a TRUTH_NOT_EXPR whose operand is a comparison.
9980
9981   If TARGET is nonzero, store the result there if convenient.
9982
9983   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9984   cheap.
9985
9986   Return zero if there is no suitable set-flag instruction
9987   available on this machine.
9988
9989   Once expand_expr has been called on the arguments of the comparison,
9990   we are committed to doing the store flag, since it is not safe to
9991   re-evaluate the expression.  We emit the store-flag insn by calling
9992   emit_store_flag, but only expand the arguments if we have a reason
9993   to believe that emit_store_flag will be successful.  If we think that
9994   it will, but it isn't, we have to simulate the store-flag with a
9995   set/jump/set sequence.  */
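
/* A sketch of that set/jump/set fallback (illustrative only, ignoring
   INVERT; "done" is just an illustrative label name):

	target = 1;
	if (op0 CODE op1) goto done;
	target = 0;
      done:

   i.e. load the "true" value and branch around the store of "false".  */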
9996
9997static rtx
9998do_store_flag (exp, target, mode, only_cheap)
9999     tree exp;
10000     rtx target;
10001     enum machine_mode mode;
10002     int only_cheap;
10003{
10004  enum rtx_code code;
10005  tree arg0, arg1, type;
10006  tree tem;
10007  enum machine_mode operand_mode;
10008  int invert = 0;
10009  int unsignedp;
10010  rtx op0, op1;
10011  enum insn_code icode;
10012  rtx subtarget = target;
10013  rtx result, label;
10014
10015  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10016     result at the end.  We can't simply invert the test since it would
10017     have already been inverted if it were valid.  This case occurs for
10018     some floating-point comparisons.  */
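
  /* For example, with IEEE floating point "!(a < b)" is not equivalent to
     "a >= b" when either operand is a NaN, so we compute (a < b) with a
     store-flag and invert the 0/1 result afterward.  */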
10019
10020  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10021    invert = 1, exp = TREE_OPERAND (exp, 0);
10022
10023  arg0 = TREE_OPERAND (exp, 0);
10024  arg1 = TREE_OPERAND (exp, 1);
10025
10026  /* Don't crash if the comparison was erroneous.  */
10027  if (arg0 == error_mark_node || arg1 == error_mark_node)
10028    return const0_rtx;
10029
10030  type = TREE_TYPE (arg0);
10031  operand_mode = TYPE_MODE (type);
10032  unsignedp = TREE_UNSIGNED (type);
10033
10034  /* We won't bother with BLKmode store-flag operations because it would mean
10035     passing a lot of information to emit_store_flag.  */
10036  if (operand_mode == BLKmode)
10037    return 0;
10038
10039  /* We won't bother with store-flag operations involving function pointers
10040     when function pointers must be canonicalized before comparisons.  */
10041#ifdef HAVE_canonicalize_funcptr_for_compare
10042  if (HAVE_canonicalize_funcptr_for_compare
10043      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10044	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10045	       == FUNCTION_TYPE))
10046	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10047	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10048		  == FUNCTION_TYPE))))
10049    return 0;
10050#endif
10051
10052  STRIP_NOPS (arg0);
10053  STRIP_NOPS (arg1);
10054
10055  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10056     operation of some type.  Some comparisons against 1 and -1 can be
10057     converted to comparisons with zero.  Do so here so that the tests
10058     below will be aware that we have a comparison with zero.   These
10059     tests will not catch constants in the first operand, but constants
10060     are rarely passed as the first operand.  */
10061
10062  switch (TREE_CODE (exp))
10063    {
10064    case EQ_EXPR:
10065      code = EQ;
10066      break;
10067    case NE_EXPR:
10068      code = NE;
10069      break;
10070    case LT_EXPR:
10071      if (integer_onep (arg1))
10072	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10073      else
10074	code = unsignedp ? LTU : LT;
10075      break;
10076    case LE_EXPR:
10077      if (! unsignedp && integer_all_onesp (arg1))
10078	arg1 = integer_zero_node, code = LT;
10079      else
10080	code = unsignedp ? LEU : LE;
10081      break;
10082    case GT_EXPR:
10083      if (! unsignedp && integer_all_onesp (arg1))
10084	arg1 = integer_zero_node, code = GE;
10085      else
10086	code = unsignedp ? GTU : GT;
10087      break;
10088    case GE_EXPR:
10089      if (integer_onep (arg1))
10090	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10091      else
10092	code = unsignedp ? GEU : GE;
10093      break;
10094
10095    case UNORDERED_EXPR:
10096      code = UNORDERED;
10097      break;
10098    case ORDERED_EXPR:
10099      code = ORDERED;
10100      break;
10101    case UNLT_EXPR:
10102      code = UNLT;
10103      break;
10104    case UNLE_EXPR:
10105      code = UNLE;
10106      break;
10107    case UNGT_EXPR:
10108      code = UNGT;
10109      break;
10110    case UNGE_EXPR:
10111      code = UNGE;
10112      break;
10113    case UNEQ_EXPR:
10114      code = UNEQ;
10115      break;
10116
10117    default:
10118      abort ();
10119    }
10120
10121  /* Put a constant second.  */
10122  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10123    {
10124      tem = arg0; arg0 = arg1; arg1 = tem;
10125      code = swap_condition (code);
10126    }
10127
10128  /* If this is an equality or inequality test of a single bit, we can
10129     do this by shifting the bit being tested to the low-order bit and
10130     masking the result with the constant 1.  If the condition was EQ,
10131     we xor it with 1.  This does not require an scc insn and is faster
10132     than an scc insn even if we have it.  */
10133
10134  if ((code == NE || code == EQ)
10135      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10136      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10137    {
10138      tree inner = TREE_OPERAND (arg0, 0);
10139      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10140      int ops_unsignedp;
10141
10142      /* If INNER is a right shift by a constant and that shift count plus
10143	 BITNUM does not overflow, adjust BITNUM and INNER.  */
10144
10145      if (TREE_CODE (inner) == RSHIFT_EXPR
10146	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10147	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10148	  && bitnum < TYPE_PRECISION (type)
10149	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10150				   bitnum - TYPE_PRECISION (type)))
10151	{
10152	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10153	  inner = TREE_OPERAND (inner, 0);
10154	}
10155
10156      /* If we are going to be able to omit the AND below, we must do our
10157	 operations as unsigned.  If we must use the AND, we have a choice.
10158	 Normally unsigned is faster, but for some machines signed is.  */
10159      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10160#ifdef LOAD_EXTEND_OP
10161		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10162#else
10163		       : 1
10164#endif
10165		       );
10166
10167      if (! get_subtarget (subtarget)
10168	  || GET_MODE (subtarget) != operand_mode
10169	  || ! safe_from_p (subtarget, inner, 1))
10170	subtarget = 0;
10171
10172      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10173
10174      if (bitnum != 0)
10175	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10176			    size_int (bitnum), subtarget, ops_unsignedp);
10177
10178      if (GET_MODE (op0) != mode)
10179	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10180
10181      if ((code == EQ && ! invert) || (code == NE && invert))
10182	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10183			    ops_unsignedp, OPTAB_LIB_WIDEN);
10184
10185      /* Put the AND last so it can combine with more things.  */
10186      if (bitnum != TYPE_PRECISION (type) - 1)
10187	op0 = expand_and (op0, const1_rtx, subtarget);
10188
10189      return op0;
10190    }
10191
10192  /* Now see if we are likely to be able to do this.  Return if not.  */
10193  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10194    return 0;
10195
10196  icode = setcc_gen_code[(int) code];
10197  if (icode == CODE_FOR_nothing
10198      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10199    {
10200      /* We can only do this if it is one of the special cases that
10201	 can be handled without an scc insn.  */
10202      if ((code == LT && integer_zerop (arg1))
10203	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10204	;
10205      else if (BRANCH_COST >= 0
10206	       && ! only_cheap && (code == NE || code == EQ)
10207	       && TREE_CODE (type) != REAL_TYPE
10208	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10209		    != CODE_FOR_nothing)
10210		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10211		       != CODE_FOR_nothing)))
10212	;
10213      else
10214	return 0;
10215    }
10216
10217  if (! get_subtarget (target)
10218      || GET_MODE (subtarget) != operand_mode
10219      || ! safe_from_p (subtarget, arg1, 1))
10220    subtarget = 0;
10221
10222  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10223  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10224
10225  if (target == 0)
10226    target = gen_reg_rtx (mode);
10227
10228  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10229     because, if emit_store_flag does anything, it will succeed and
10230     OP0 and OP1 will not be used subsequently.  */
10231
10232  result = emit_store_flag (target, code,
10233			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10234			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10235			    operand_mode, unsignedp, 1);
10236
10237  if (result)
10238    {
10239      if (invert)
10240	result = expand_binop (mode, xor_optab, result, const1_rtx,
10241			       result, 0, OPTAB_LIB_WIDEN);
10242      return result;
10243    }
10244
10245  /* If this failed, we have to do this with set/compare/jump/set code.  */
10246  if (GET_CODE (target) != REG
10247      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10248    target = gen_reg_rtx (GET_MODE (target));
10249
10250  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10251  result = compare_from_rtx (op0, op1, code, unsignedp,
10252			     operand_mode, NULL_RTX);
10253  if (GET_CODE (result) == CONST_INT)
10254    return (((result == const0_rtx && ! invert)
10255	     || (result != const0_rtx && invert))
10256	    ? const0_rtx : const1_rtx);
10257
10258  /* The code of RESULT may not match CODE if compare_from_rtx
10259     decided to swap its operands and reverse the original code.
10260
10261     We know that compare_from_rtx returns either a CONST_INT or
10262     a new comparison code, so it is safe to just extract the
10263     code from RESULT.  */
10264  code = GET_CODE (result);
10265
10266  label = gen_label_rtx ();
10267  if (bcc_gen_fctn[(int) code] == 0)
10268    abort ();
10269
10270  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10271  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10272  emit_label (label);
10273
10274  return target;
10275}
10276
10277
10278/* Stubs in case we haven't got a casesi insn.  */
10279#ifndef HAVE_casesi
10280# define HAVE_casesi 0
10281# define gen_casesi(a, b, c, d, e) (0)
10282# define CODE_FOR_casesi CODE_FOR_nothing
10283#endif
10284
10285/* If the machine does not have a case insn that compares the bounds,
10286   this means extra overhead for dispatch tables, which raises the
10287   threshold for using them.  */
10288#ifndef CASE_VALUES_THRESHOLD
10289#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10290#endif /* CASE_VALUES_THRESHOLD */
10291
10292unsigned int
10293case_values_threshold ()
10294{
10295  return CASE_VALUES_THRESHOLD;
10296}
10297
10298/* Attempt to generate a casesi instruction.  Returns 1 if successful,
10299   0 otherwise (i.e. if there is no casesi instruction).  */
10300int
10301try_casesi (index_type, index_expr, minval, range,
10302	    table_label, default_label)
10303     tree index_type, index_expr, minval, range;
10304     rtx table_label ATTRIBUTE_UNUSED;
10305     rtx default_label;
10306{
10307  enum machine_mode index_mode = SImode;
10308  int index_bits = GET_MODE_BITSIZE (index_mode);
10309  rtx op1, op2, index;
10310  enum machine_mode op_mode;
10311
10312  if (! HAVE_casesi)
10313    return 0;
10314
10315  /* Convert the index to SImode.  */
10316  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10317    {
10318      enum machine_mode omode = TYPE_MODE (index_type);
10319      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10320
10321      /* We must handle the endpoints in the original mode.  */
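      /* In effect (a sketch): compute index - minval in the wide mode and
	 jump to default_label if (unsigned) range < (unsigned) (index - minval),
	 so that truncating the difference to SImode afterwards cannot wrap an
	 out-of-range value back into range.  */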
10322      index_expr = build (MINUS_EXPR, index_type,
10323			  index_expr, minval);
10324      minval = integer_zero_node;
10325      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10326      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10327			       omode, 1, default_label);
10328      /* Now we can safely truncate.  */
10329      index = convert_to_mode (index_mode, index, 0);
10330    }
10331  else
10332    {
10333      if (TYPE_MODE (index_type) != index_mode)
10334	{
10335	  index_expr = convert (type_for_size (index_bits, 0),
10336				index_expr);
10337	  index_type = TREE_TYPE (index_expr);
10338	}
10339
10340      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10341    }
10342  emit_queue ();
10343  index = protect_from_queue (index, 0);
10344  do_pending_stack_adjust ();
10345
10346  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10347  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10348      (index, op_mode))
10349    index = copy_to_mode_reg (op_mode, index);
10350
10351  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10352
10353  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10354  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10355		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10356  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10357      (op1, op_mode))
10358    op1 = copy_to_mode_reg (op_mode, op1);
10359
10360  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10361
10362  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10363  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10364		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
10365  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10366      (op2, op_mode))
10367    op2 = copy_to_mode_reg (op_mode, op2);
10368
10369  emit_jump_insn (gen_casesi (index, op1, op2,
10370			      table_label, default_label));
10371  return 1;
10372}
10373
10374/* Attempt to generate a tablejump instruction; same concept as try_casesi.  */
10375#ifndef HAVE_tablejump
10376#define HAVE_tablejump 0
10377#define gen_tablejump(x, y) (0)
10378#endif
10379
10380/* Subroutine of the next function.
10381
10382   INDEX is the value being switched on, with the lowest value
10383   in the table already subtracted.
10384   MODE is its expected mode (needed if INDEX is constant).
10385   RANGE is the length of the jump table.
10386   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10387
10388   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10389   index value is out of range.  */
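
/* A sketch of the dispatch sequence this expands to (illustration only;
   "entry" is just an illustrative name):

	if ((unsigned) index > range) goto default_label;
	entry = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));
	goto entry;

   where each vector entry is an absolute label address or, for PIC and
   pc-relative tables, an offset that the tablejump pattern adjusts.  */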
10390
10391static void
10392do_tablejump (index, mode, range, table_label, default_label)
10393     rtx index, range, table_label, default_label;
10394     enum machine_mode mode;
10395{
10396  rtx temp, vector;
10397
10398  /* Do an unsigned comparison (in the proper mode) between the index
10399     expression and the value which represents the length of the range.
10400     Since we just finished subtracting the lower bound of the range
10401     from the index expression, this comparison allows us to simultaneously
10402     check that the original index expression value is both greater than
10403     or equal to the minimum value of the range and less than or equal to
10404     the maximum value of the range.  */
10405
10406  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10407			   default_label);
10408
10409  /* If index is in range, it must fit in Pmode.
10410     Convert to Pmode so we can index with it.  */
10411  if (mode != Pmode)
10412    index = convert_to_mode (Pmode, index, 1);
10413
10414  /* Don't let a MEM slip through, because then INDEX that comes
10415     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10416     and break_out_memory_refs will go to work on it and mess it up.  */
10417#ifdef PIC_CASE_VECTOR_ADDRESS
10418  if (flag_pic && GET_CODE (index) != REG)
10419    index = copy_to_mode_reg (Pmode, index);
10420#endif
10421
10422  /* If flag_force_addr were to affect this address
10423     it could interfere with the tricky assumptions made
10424     about addresses that contain label-refs,
10425     which may be valid only very near the tablejump itself.  */
10426  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10427     GET_MODE_SIZE, because this indicates how large insns are.  The other
10428     uses should all be Pmode, because they are addresses.  This code
10429     could fail if addresses and insns are not the same size.  */
10430  index = gen_rtx_PLUS (Pmode,
10431			gen_rtx_MULT (Pmode, index,
10432				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10433			gen_rtx_LABEL_REF (Pmode, table_label));
10434#ifdef PIC_CASE_VECTOR_ADDRESS
10435  if (flag_pic)
10436    index = PIC_CASE_VECTOR_ADDRESS (index);
10437  else
10438#endif
10439    index = memory_address_noforce (CASE_VECTOR_MODE, index);
10440  temp = gen_reg_rtx (CASE_VECTOR_MODE);
10441  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10442  RTX_UNCHANGING_P (vector) = 1;
10443  convert_move (temp, vector, 0);
10444
10445  emit_jump_insn (gen_tablejump (temp, table_label));
10446
10447  /* If we are generating PIC code or if the table is PC-relative, the
10448     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10449  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10450    emit_barrier ();
10451}
10452
10453int
10454try_tablejump (index_type, index_expr, minval, range,
10455	       table_label, default_label)
10456     tree index_type, index_expr, minval, range;
10457     rtx table_label, default_label;
10458{
10459  rtx index;
10460
10461  if (! HAVE_tablejump)
10462    return 0;
10463
10464  index_expr = fold (build (MINUS_EXPR, index_type,
10465			    convert (index_type, index_expr),
10466			    convert (index_type, minval)));
10467  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10468  emit_queue ();
10469  index = protect_from_queue (index, 0);
10470  do_pending_stack_adjust ();
10471
10472  do_tablejump (index, TYPE_MODE (index_type),
10473		convert_modes (TYPE_MODE (index_type),
10474			       TYPE_MODE (TREE_TYPE (range)),
10475			       expand_expr (range, NULL_RTX,
10476					    VOIDmode, 0),
10477			       TREE_UNSIGNED (TREE_TYPE (range))),
10478		table_label, default_label);
10479  return 1;
10480}
10481