expr.c revision 96489
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "rtl.h"
26#include "tree.h"
27#include "obstack.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should if the stack and args grow in opposite directions, but
53   only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
59#endif
60
61#endif
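
/* For illustration (comment added, not in the original source): on a
   typical target where the stack grows downward but argument offsets
   grow upward, STACK_GROWS_DOWNWARD is defined and ARGS_GROW_DOWNWARD is
   not, so the two defined() values differ and PUSH_ARGS_REVERSED is
   defined; the arguments are then processed from last to first so that
   each one can be emitted with a push insn.  */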
62
63#ifndef STACK_PUSH_CODE
64#ifdef STACK_GROWS_DOWNWARD
65#define STACK_PUSH_CODE PRE_DEC
66#else
67#define STACK_PUSH_CODE PRE_INC
68#endif
69#endif
70
71/* Assume that case vectors are not pc-relative.  */
72#ifndef CASE_VECTOR_PC_RELATIVE
73#define CASE_VECTOR_PC_RELATIVE 0
74#endif
75
76/* If this is nonzero, we do not bother generating VOLATILE
77   around volatile memory references, and we are willing to
78   output indirect addresses.  If cse is to follow, we reject
79   indirect addresses so a useful potential cse is generated;
80   if it is used only once, instruction combination will produce
81   the same indirect address eventually.  */
82int cse_not_expected;
83
84/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
85static tree placeholder_list = 0;
86
87/* This structure is used by move_by_pieces to describe the move to
88   be performed.  */
89struct move_by_pieces
90{
91  rtx to;
92  rtx to_addr;
93  int autinc_to;
94  int explicit_inc_to;
95  rtx from;
96  rtx from_addr;
97  int autinc_from;
98  int explicit_inc_from;
99  unsigned HOST_WIDE_INT len;
100  HOST_WIDE_INT offset;
101  int reverse;
102};
103
104/* This structure is used by store_by_pieces to describe the clear to
105   be performed.  */
106
107struct store_by_pieces
108{
109  rtx to;
110  rtx to_addr;
111  int autinc_to;
112  int explicit_inc_to;
113  unsigned HOST_WIDE_INT len;
114  HOST_WIDE_INT offset;
115  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116  PTR constfundata;
117  int reverse;
118};
119
120extern struct obstack permanent_obstack;
121
122static rtx enqueue_insn		PARAMS ((rtx, rtx));
123static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124				PARAMS ((unsigned HOST_WIDE_INT,
125					 unsigned int));
126static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127					 struct move_by_pieces *));
128static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
129					 enum machine_mode));
130static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
131					 unsigned int));
132static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
133					 unsigned int));
134static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
135					 enum machine_mode,
136					 struct store_by_pieces *));
137static rtx get_subtarget	PARAMS ((rtx));
138static int is_zeros_p		PARAMS ((tree));
139static int mostly_zeros_p	PARAMS ((tree));
140static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141					     HOST_WIDE_INT, enum machine_mode,
142					     tree, tree, int, int));
143static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
145					 HOST_WIDE_INT, enum machine_mode,
146					 tree, enum machine_mode, int, tree,
147					 int));
148static rtx var_rtx		PARAMS ((tree));
149static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
151static int is_aligning_offset	PARAMS ((tree, tree));
152static rtx expand_increment	PARAMS ((tree, int, int));
153static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
154static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
155static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
156					 rtx, rtx));
157static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
158#ifdef PUSH_ROUNDING
159static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
160#endif
161static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
162
163/* Record for each mode whether we can move a register directly to or
164   from an object of that mode in memory.  If we can't, we won't try
165   to use that mode directly when accessing a field of that mode.  */
166
167static char direct_load[NUM_MACHINE_MODES];
168static char direct_store[NUM_MACHINE_MODES];
169
170/* If a memory-to-memory move would take MOVE_RATIO or more simple
171   move-instruction sequences, we will do a movstr or libcall instead.  */
172
173#ifndef MOVE_RATIO
174#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175#define MOVE_RATIO 2
176#else
177/* If we are optimizing for space (-Os), cut down the default move ratio.  */
178#define MOVE_RATIO (optimize_size ? 3 : 15)
179#endif
180#endif
181
182/* This macro is used to determine whether move_by_pieces should be called
183   to perform a structure copy.  */
184#ifndef MOVE_BY_PIECES_P
185#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
186  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
187#endif
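
/* Rough worked example, illustrative only: on a 32-bit target with no
   movstrM patterns, word-aligned operands and MOVE_RATIO of 15 (the
   default above when not optimizing for size), copying 16 bytes takes
   four SImode moves, so move_by_pieces_ninsns returns 4; 4 < 15, so
   MOVE_BY_PIECES_P is true and the copy is expanded inline rather than
   through a movstr pattern or a memcpy call.  */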
188
189/* This array records the insn_code of insns to perform block moves.  */
190enum insn_code movstr_optab[NUM_MACHINE_MODES];
191
192/* This array records the insn_code of insns to perform block clears.  */
193enum insn_code clrstr_optab[NUM_MACHINE_MODES];
194
195/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */
196
197#ifndef SLOW_UNALIGNED_ACCESS
198#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199#endif
200
201/* This is run once per compilation to set up which modes can be used
202   directly in memory and to initialize the block move optab.  */
203
204void
205init_expr_once ()
206{
207  rtx insn, pat;
208  enum machine_mode mode;
209  int num_clobbers;
210  rtx mem, mem1;
211
212  start_sequence ();
213
214  /* Try indexing by frame ptr and try by stack ptr.
215     It is known that on the Convex the stack ptr isn't a valid index.
216     With luck, one or the other is valid on any machine.  */
217  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
218  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219
220  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
221  pat = PATTERN (insn);
222
223  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
224       mode = (enum machine_mode) ((int) mode + 1))
225    {
226      int regno;
227      rtx reg;
228
229      direct_load[(int) mode] = direct_store[(int) mode] = 0;
230      PUT_MODE (mem, mode);
231      PUT_MODE (mem1, mode);
232
233      /* See if there is some register that can be used in this mode and
234	 directly loaded or stored from memory.  */
235
236      if (mode != VOIDmode && mode != BLKmode)
237	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
238	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
239	     regno++)
240	  {
241	    if (! HARD_REGNO_MODE_OK (regno, mode))
242	      continue;
243
244	    reg = gen_rtx_REG (mode, regno);
245
246	    SET_SRC (pat) = mem;
247	    SET_DEST (pat) = reg;
248	    if (recog (pat, insn, &num_clobbers) >= 0)
249	      direct_load[(int) mode] = 1;
250
251	    SET_SRC (pat) = mem1;
252	    SET_DEST (pat) = reg;
253	    if (recog (pat, insn, &num_clobbers) >= 0)
254	      direct_load[(int) mode] = 1;
255
256	    SET_SRC (pat) = reg;
257	    SET_DEST (pat) = mem;
258	    if (recog (pat, insn, &num_clobbers) >= 0)
259	      direct_store[(int) mode] = 1;
260
261	    SET_SRC (pat) = reg;
262	    SET_DEST (pat) = mem1;
263	    if (recog (pat, insn, &num_clobbers) >= 0)
264	      direct_store[(int) mode] = 1;
265	  }
266    }
267
268  end_sequence ();
269}
270
271/* This is run at the start of compiling a function.  */
272
273void
274init_expr ()
275{
276  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
277
278  pending_chain = 0;
279  pending_stack_adjust = 0;
280  stack_pointer_delta = 0;
281  inhibit_defer_pop = 0;
282  saveregs_value = 0;
283  apply_args_value = 0;
284  forced_labels = 0;
285}
286
287void
288mark_expr_status (p)
289     struct expr_status *p;
290{
291  if (p == NULL)
292    return;
293
294  ggc_mark_rtx (p->x_saveregs_value);
295  ggc_mark_rtx (p->x_apply_args_value);
296  ggc_mark_rtx (p->x_forced_labels);
297}
298
299void
300free_expr_status (f)
301     struct function *f;
302{
303  free (f->expr);
304  f->expr = NULL;
305}
306
307/* Small sanity check that the queue is empty at the end of a function.  */
308
309void
310finish_expr_for_function ()
311{
312  if (pending_chain)
313    abort ();
314}
315
316/* Manage the queue of increment instructions to be output
317   for POSTINCREMENT_EXPR expressions, etc.  */
318
319/* Queue up to increment (or change) VAR later.  BODY says how:
320   BODY should be the same thing you would pass to emit_insn
321   to increment right away.  It will go to emit_insn later on.
322
323   The value is a QUEUED expression to be used in place of VAR
324   where you want to guarantee the pre-incrementation value of VAR.  */
325
326static rtx
327enqueue_insn (var, body)
328     rtx var, body;
329{
330  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
331				  body, pending_chain);
332  return pending_chain;
333}
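
/* Illustrative sketch, not part of the original source: expansion of a
   post-increment such as `i++' used for its value would do roughly

       rtx q = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));

   where `i_rtx' is a hypothetical rtx for the variable.  The QUEUED rtx
   Q then stands for the pre-increment value; protect_from_queue turns it
   back into something an insn can use, and emit_queue later emits the
   queued addition.  */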
334
335/* Use protect_from_queue to convert a QUEUED expression
336   into something that you can put immediately into an instruction.
337   If the queued incrementation has not happened yet,
338   protect_from_queue returns the variable itself.
339   If the incrementation has happened, protect_from_queue returns a temp
340   that contains a copy of the old value of the variable.
341
342   Any time an rtx which might possibly be a QUEUED is to be put
343   into an instruction, it must be passed through protect_from_queue first.
344   QUEUED expressions are not meaningful in instructions.
345
346   Do not pass a value through protect_from_queue and then hold
347   on to it for a while before putting it in an instruction!
348   If the queue is flushed in between, incorrect code will result.  */
349
350rtx
351protect_from_queue (x, modify)
352     rtx x;
353     int modify;
354{
355  RTX_CODE code = GET_CODE (x);
356
357#if 0  /* A QUEUED can hang around after the queue is forced out.  */
358  /* Shortcut for most common case.  */
359  if (pending_chain == 0)
360    return x;
361#endif
362
363  if (code != QUEUED)
364    {
365      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
366	 use of autoincrement.  Make a copy of the contents of the memory
367	 location rather than a copy of the address, but not if the value is
368	 of mode BLKmode.  Don't modify X in place since it might be
369	 shared.  */
370      if (code == MEM && GET_MODE (x) != BLKmode
371	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
372	{
373	  rtx y = XEXP (x, 0);
374	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
375
376	  if (QUEUED_INSN (y))
377	    {
378	      rtx temp = gen_reg_rtx (GET_MODE (x));
379
380	      emit_insn_before (gen_move_insn (temp, new),
381				QUEUED_INSN (y));
382	      return temp;
383	    }
384
385	  /* Copy the address into a pseudo, so that the returned value
386	     remains correct across calls to emit_queue.  */
387	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
388	}
389
390      /* Otherwise, recursively protect the subexpressions of all
391	 the kinds of rtx's that can contain a QUEUED.  */
392      if (code == MEM)
393	{
394	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
395	  if (tem != XEXP (x, 0))
396	    {
397	      x = copy_rtx (x);
398	      XEXP (x, 0) = tem;
399	    }
400	}
401      else if (code == PLUS || code == MULT)
402	{
403	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
404	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
405	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
406	    {
407	      x = copy_rtx (x);
408	      XEXP (x, 0) = new0;
409	      XEXP (x, 1) = new1;
410	    }
411	}
412      return x;
413    }
414  /* If the increment has not happened, use the variable itself.  Copy it
415     into a new pseudo so that the value remains correct across calls to
416     emit_queue.  */
417  if (QUEUED_INSN (x) == 0)
418    return copy_to_reg (QUEUED_VAR (x));
419  /* If the increment has happened and a pre-increment copy exists,
420     use that copy.  */
421  if (QUEUED_COPY (x) != 0)
422    return QUEUED_COPY (x);
423  /* The increment has happened but we haven't set up a pre-increment copy.
424     Set one up now, and use it.  */
425  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
426  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
427		    QUEUED_INSN (x));
428  return QUEUED_COPY (x);
429}
430
431/* Return nonzero if X contains a QUEUED expression:
432   if it contains anything that will be altered by a queued increment.
433   We handle only combinations of MEM, PLUS, MINUS and MULT operators
434   since memory addresses generally contain only those.  */
435
436int
437queued_subexp_p (x)
438     rtx x;
439{
440  enum rtx_code code = GET_CODE (x);
441  switch (code)
442    {
443    case QUEUED:
444      return 1;
445    case MEM:
446      return queued_subexp_p (XEXP (x, 0));
447    case MULT:
448    case PLUS:
449    case MINUS:
450      return (queued_subexp_p (XEXP (x, 0))
451	      || queued_subexp_p (XEXP (x, 1)));
452    default:
453      return 0;
454    }
455}
456
457/* Perform all the pending incrementations.  */
458
459void
460emit_queue ()
461{
462  rtx p;
463  while ((p = pending_chain))
464    {
465      rtx body = QUEUED_BODY (p);
466
467      if (GET_CODE (body) == SEQUENCE)
468	{
469	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
470	  emit_insn (QUEUED_BODY (p));
471	}
472      else
473	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
474      pending_chain = QUEUED_NEXT (p);
475    }
476}
477
478/* Copy data from FROM to TO, where the machine modes are not the same.
479   Both modes may be integer, or both may be floating.
480   UNSIGNEDP should be nonzero if FROM is an unsigned type.
481   This causes zero-extension instead of sign-extension.  */
482
483void
484convert_move (to, from, unsignedp)
485     rtx to, from;
486     int unsignedp;
487{
488  enum machine_mode to_mode = GET_MODE (to);
489  enum machine_mode from_mode = GET_MODE (from);
490  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
491  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
492  enum insn_code code;
493  rtx libcall;
494
495  /* rtx code for making an equivalent value.  */
496  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
497
498  to = protect_from_queue (to, 1);
499  from = protect_from_queue (from, 0);
500
501  if (to_real != from_real)
502    abort ();
503
504  /* If FROM is a SUBREG that indicates that we have already done at least
505     the required extension, strip it.  We don't handle such SUBREGs as
506     TO here.  */
507
508  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
509      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
510	  >= GET_MODE_SIZE (to_mode))
511      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
512    from = gen_lowpart (to_mode, from), from_mode = to_mode;
513
514  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
515    abort ();
516
517  if (to_mode == from_mode
518      || (from_mode == VOIDmode && CONSTANT_P (from)))
519    {
520      emit_move_insn (to, from);
521      return;
522    }
523
524  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
525    {
526      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
527	abort ();
528
529      if (VECTOR_MODE_P (to_mode))
530	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
531      else
532	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
533
534      emit_move_insn (to, from);
535      return;
536    }
537
538  if (to_real != from_real)
539    abort ();
540
541  if (to_real)
542    {
543      rtx value, insns;
544
545      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
546	{
547	  /* Try converting directly if the insn is supported.  */
548	  if ((code = can_extend_p (to_mode, from_mode, 0))
549	      != CODE_FOR_nothing)
550	    {
551	      emit_unop_insn (code, to, from, UNKNOWN);
552	      return;
553	    }
554	}
555
556#ifdef HAVE_trunchfqf2
557      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
558	{
559	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
560	  return;
561	}
562#endif
563#ifdef HAVE_trunctqfqf2
564      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
565	{
566	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
567	  return;
568	}
569#endif
570#ifdef HAVE_truncsfqf2
571      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
572	{
573	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
574	  return;
575	}
576#endif
577#ifdef HAVE_truncdfqf2
578      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
579	{
580	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
581	  return;
582	}
583#endif
584#ifdef HAVE_truncxfqf2
585      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
586	{
587	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
588	  return;
589	}
590#endif
591#ifdef HAVE_trunctfqf2
592      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
593	{
594	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
595	  return;
596	}
597#endif
598
599#ifdef HAVE_trunctqfhf2
600      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
601	{
602	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
603	  return;
604	}
605#endif
606#ifdef HAVE_truncsfhf2
607      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
608	{
609	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
610	  return;
611	}
612#endif
613#ifdef HAVE_truncdfhf2
614      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
615	{
616	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
617	  return;
618	}
619#endif
620#ifdef HAVE_truncxfhf2
621      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
622	{
623	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
624	  return;
625	}
626#endif
627#ifdef HAVE_trunctfhf2
628      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
629	{
630	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
631	  return;
632	}
633#endif
634
635#ifdef HAVE_truncsftqf2
636      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
637	{
638	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
639	  return;
640	}
641#endif
642#ifdef HAVE_truncdftqf2
643      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
644	{
645	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
646	  return;
647	}
648#endif
649#ifdef HAVE_truncxftqf2
650      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
651	{
652	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
653	  return;
654	}
655#endif
656#ifdef HAVE_trunctftqf2
657      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
658	{
659	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
660	  return;
661	}
662#endif
663
664#ifdef HAVE_truncdfsf2
665      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
666	{
667	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
668	  return;
669	}
670#endif
671#ifdef HAVE_truncxfsf2
672      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
673	{
674	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
675	  return;
676	}
677#endif
678#ifdef HAVE_trunctfsf2
679      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
680	{
681	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
682	  return;
683	}
684#endif
685#ifdef HAVE_truncxfdf2
686      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
687	{
688	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
689	  return;
690	}
691#endif
692#ifdef HAVE_trunctfdf2
693      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
694	{
695	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
696	  return;
697	}
698#endif
699
700      libcall = (rtx) 0;
701      switch (from_mode)
702	{
703	case SFmode:
704	  switch (to_mode)
705	    {
706	    case DFmode:
707	      libcall = extendsfdf2_libfunc;
708	      break;
709
710	    case XFmode:
711	      libcall = extendsfxf2_libfunc;
712	      break;
713
714	    case TFmode:
715	      libcall = extendsftf2_libfunc;
716	      break;
717
718	    default:
719	      break;
720	    }
721	  break;
722
723	case DFmode:
724	  switch (to_mode)
725	    {
726	    case SFmode:
727	      libcall = truncdfsf2_libfunc;
728	      break;
729
730	    case XFmode:
731	      libcall = extenddfxf2_libfunc;
732	      break;
733
734	    case TFmode:
735	      libcall = extenddftf2_libfunc;
736	      break;
737
738	    default:
739	      break;
740	    }
741	  break;
742
743	case XFmode:
744	  switch (to_mode)
745	    {
746	    case SFmode:
747	      libcall = truncxfsf2_libfunc;
748	      break;
749
750	    case DFmode:
751	      libcall = truncxfdf2_libfunc;
752	      break;
753
754	    default:
755	      break;
756	    }
757	  break;
758
759	case TFmode:
760	  switch (to_mode)
761	    {
762	    case SFmode:
763	      libcall = trunctfsf2_libfunc;
764	      break;
765
766	    case DFmode:
767	      libcall = trunctfdf2_libfunc;
768	      break;
769
770	    default:
771	      break;
772	    }
773	  break;
774
775	default:
776	  break;
777	}
778
779      if (libcall == (rtx) 0)
780	/* This conversion is not implemented yet.  */
781	abort ();
782
783      start_sequence ();
784      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
785				       1, from, from_mode);
786      insns = get_insns ();
787      end_sequence ();
788      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
789								    from));
790      return;
791    }
792
793  /* Now both modes are integers.  */
794
795  /* Handle expanding beyond a word.  */
796  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
797      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
798    {
799      rtx insns;
800      rtx lowpart;
801      rtx fill_value;
802      rtx lowfrom;
803      int i;
804      enum machine_mode lowpart_mode;
805      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
806
807      /* Try converting directly if the insn is supported.  */
808      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
809	  != CODE_FOR_nothing)
810	{
811	  /* If FROM is a SUBREG, put it into a register.  Do this
812	     so that we always generate the same set of insns for
813	     better cse'ing; if an intermediate assignment occurred,
814	     we won't be doing the operation directly on the SUBREG.  */
815	  if (optimize > 0 && GET_CODE (from) == SUBREG)
816	    from = force_reg (from_mode, from);
817	  emit_unop_insn (code, to, from, equiv_code);
818	  return;
819	}
820      /* Next, try converting via full word.  */
821      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
822	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
823		   != CODE_FOR_nothing))
824	{
825	  if (GET_CODE (to) == REG)
826	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
827	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
828	  emit_unop_insn (code, to,
829			  gen_lowpart (word_mode, to), equiv_code);
830	  return;
831	}
832
833      /* No special multiword conversion insn; do it by hand.  */
834      start_sequence ();
835
836      /* Since we will turn this into a no conflict block, we must ensure
837	 that the source does not overlap the target.  */
838
839      if (reg_overlap_mentioned_p (to, from))
840	from = force_reg (from_mode, from);
841
842      /* Get a copy of FROM widened to a word, if necessary.  */
843      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
844	lowpart_mode = word_mode;
845      else
846	lowpart_mode = from_mode;
847
848      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
849
850      lowpart = gen_lowpart (lowpart_mode, to);
851      emit_move_insn (lowpart, lowfrom);
852
853      /* Compute the value to put in each remaining word.  */
854      if (unsignedp)
855	fill_value = const0_rtx;
856      else
857	{
858#ifdef HAVE_slt
859	  if (HAVE_slt
860	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
861	      && STORE_FLAG_VALUE == -1)
862	    {
863	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
864			     lowpart_mode, 0);
865	      fill_value = gen_reg_rtx (word_mode);
866	      emit_insn (gen_slt (fill_value));
867	    }
868	  else
869#endif
870	    {
871	      fill_value
872		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
873				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
874				NULL_RTX, 0);
875	      fill_value = convert_to_mode (word_mode, fill_value, 1);
876	    }
877	}
878
879      /* Fill the remaining words.  */
880      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
881	{
882	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
883	  rtx subword = operand_subword (to, index, 1, to_mode);
884
885	  if (subword == 0)
886	    abort ();
887
888	  if (fill_value != subword)
889	    emit_move_insn (subword, fill_value);
890	}
891
892      insns = get_insns ();
893      end_sequence ();
894
895      emit_no_conflict_block (insns, to, from, NULL_RTX,
896			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
897      return;
898    }
899
900  /* Truncating multi-word to a word or less.  */
901  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
902      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
903    {
904      if (!((GET_CODE (from) == MEM
905	     && ! MEM_VOLATILE_P (from)
906	     && direct_load[(int) to_mode]
907	     && ! mode_dependent_address_p (XEXP (from, 0)))
908	    || GET_CODE (from) == REG
909	    || GET_CODE (from) == SUBREG))
910	from = force_reg (from_mode, from);
911      convert_move (to, gen_lowpart (word_mode, from), 0);
912      return;
913    }
914
915  /* Handle pointer conversion.  */			/* SPEE 900220.  */
916  if (to_mode == PQImode)
917    {
918      if (from_mode != QImode)
919	from = convert_to_mode (QImode, from, unsignedp);
920
921#ifdef HAVE_truncqipqi2
922      if (HAVE_truncqipqi2)
923	{
924	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
925	  return;
926	}
927#endif /* HAVE_truncqipqi2 */
928      abort ();
929    }
930
931  if (from_mode == PQImode)
932    {
933      if (to_mode != QImode)
934	{
935	  from = convert_to_mode (QImode, from, unsignedp);
936	  from_mode = QImode;
937	}
938      else
939	{
940#ifdef HAVE_extendpqiqi2
941	  if (HAVE_extendpqiqi2)
942	    {
943	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
944	      return;
945	    }
946#endif /* HAVE_extendpqiqi2 */
947	  abort ();
948	}
949    }
950
951  if (to_mode == PSImode)
952    {
953      if (from_mode != SImode)
954	from = convert_to_mode (SImode, from, unsignedp);
955
956#ifdef HAVE_truncsipsi2
957      if (HAVE_truncsipsi2)
958	{
959	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
960	  return;
961	}
962#endif /* HAVE_truncsipsi2 */
963      abort ();
964    }
965
966  if (from_mode == PSImode)
967    {
968      if (to_mode != SImode)
969	{
970	  from = convert_to_mode (SImode, from, unsignedp);
971	  from_mode = SImode;
972	}
973      else
974	{
975#ifdef HAVE_extendpsisi2
976	  if (! unsignedp && HAVE_extendpsisi2)
977	    {
978	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
979	      return;
980	    }
981#endif /* HAVE_extendpsisi2 */
982#ifdef HAVE_zero_extendpsisi2
983	  if (unsignedp && HAVE_zero_extendpsisi2)
984	    {
985	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
986	      return;
987	    }
988#endif /* HAVE_zero_extendpsisi2 */
989	  abort ();
990	}
991    }
992
993  if (to_mode == PDImode)
994    {
995      if (from_mode != DImode)
996	from = convert_to_mode (DImode, from, unsignedp);
997
998#ifdef HAVE_truncdipdi2
999      if (HAVE_truncdipdi2)
1000	{
1001	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1002	  return;
1003	}
1004#endif /* HAVE_truncdipdi2 */
1005      abort ();
1006    }
1007
1008  if (from_mode == PDImode)
1009    {
1010      if (to_mode != DImode)
1011	{
1012	  from = convert_to_mode (DImode, from, unsignedp);
1013	  from_mode = DImode;
1014	}
1015      else
1016	{
1017#ifdef HAVE_extendpdidi2
1018	  if (HAVE_extendpdidi2)
1019	    {
1020	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1021	      return;
1022	    }
1023#endif /* HAVE_extendpdidi2 */
1024	  abort ();
1025	}
1026    }
1027
1028  /* Now follow all the conversions between integers
1029     no more than a word long.  */
1030
1031  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1032  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1033      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1034				GET_MODE_BITSIZE (from_mode)))
1035    {
1036      if (!((GET_CODE (from) == MEM
1037	     && ! MEM_VOLATILE_P (from)
1038	     && direct_load[(int) to_mode]
1039	     && ! mode_dependent_address_p (XEXP (from, 0)))
1040	    || GET_CODE (from) == REG
1041	    || GET_CODE (from) == SUBREG))
1042	from = force_reg (from_mode, from);
1043      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1044	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1045	from = copy_to_reg (from);
1046      emit_move_insn (to, gen_lowpart (to_mode, from));
1047      return;
1048    }
1049
1050  /* Handle extension.  */
1051  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1052    {
1053      /* Convert directly if that works.  */
1054      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1055	  != CODE_FOR_nothing)
1056	{
1057	  if (flag_force_mem)
1058	    from = force_not_mem (from);
1059
1060	  emit_unop_insn (code, to, from, equiv_code);
1061	  return;
1062	}
1063      else
1064	{
1065	  enum machine_mode intermediate;
1066	  rtx tmp;
1067	  tree shift_amount;
1068
1069	  /* Search for a mode to convert via.  */
1070	  for (intermediate = from_mode; intermediate != VOIDmode;
1071	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1072	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1073		  != CODE_FOR_nothing)
1074		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1075		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1076					       GET_MODE_BITSIZE (intermediate))))
1077		&& (can_extend_p (intermediate, from_mode, unsignedp)
1078		    != CODE_FOR_nothing))
1079	      {
1080		convert_move (to, convert_to_mode (intermediate, from,
1081						   unsignedp), unsignedp);
1082		return;
1083	      }
1084
1085	  /* No suitable intermediate mode.
1086	     Generate what we need with shifts.  */
1087	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1088				      - GET_MODE_BITSIZE (from_mode), 0);
1089	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1090	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1091			      to, unsignedp);
1092	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1093			      to, unsignedp);
1094	  if (tmp != to)
1095	    emit_move_insn (to, tmp);
1096	  return;
1097	}
1098    }
1099
1100  /* Support special truncate insns for certain modes.  */
1101
1102  if (from_mode == DImode && to_mode == SImode)
1103    {
1104#ifdef HAVE_truncdisi2
1105      if (HAVE_truncdisi2)
1106	{
1107	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1108	  return;
1109	}
1110#endif
1111      convert_move (to, force_reg (from_mode, from), unsignedp);
1112      return;
1113    }
1114
1115  if (from_mode == DImode && to_mode == HImode)
1116    {
1117#ifdef HAVE_truncdihi2
1118      if (HAVE_truncdihi2)
1119	{
1120	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1121	  return;
1122	}
1123#endif
1124      convert_move (to, force_reg (from_mode, from), unsignedp);
1125      return;
1126    }
1127
1128  if (from_mode == DImode && to_mode == QImode)
1129    {
1130#ifdef HAVE_truncdiqi2
1131      if (HAVE_truncdiqi2)
1132	{
1133	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1134	  return;
1135	}
1136#endif
1137      convert_move (to, force_reg (from_mode, from), unsignedp);
1138      return;
1139    }
1140
1141  if (from_mode == SImode && to_mode == HImode)
1142    {
1143#ifdef HAVE_truncsihi2
1144      if (HAVE_truncsihi2)
1145	{
1146	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1147	  return;
1148	}
1149#endif
1150      convert_move (to, force_reg (from_mode, from), unsignedp);
1151      return;
1152    }
1153
1154  if (from_mode == SImode && to_mode == QImode)
1155    {
1156#ifdef HAVE_truncsiqi2
1157      if (HAVE_truncsiqi2)
1158	{
1159	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1160	  return;
1161	}
1162#endif
1163      convert_move (to, force_reg (from_mode, from), unsignedp);
1164      return;
1165    }
1166
1167  if (from_mode == HImode && to_mode == QImode)
1168    {
1169#ifdef HAVE_trunchiqi2
1170      if (HAVE_trunchiqi2)
1171	{
1172	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1173	  return;
1174	}
1175#endif
1176      convert_move (to, force_reg (from_mode, from), unsignedp);
1177      return;
1178    }
1179
1180  if (from_mode == TImode && to_mode == DImode)
1181    {
1182#ifdef HAVE_trunctidi2
1183      if (HAVE_trunctidi2)
1184	{
1185	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1186	  return;
1187	}
1188#endif
1189      convert_move (to, force_reg (from_mode, from), unsignedp);
1190      return;
1191    }
1192
1193  if (from_mode == TImode && to_mode == SImode)
1194    {
1195#ifdef HAVE_trunctisi2
1196      if (HAVE_trunctisi2)
1197	{
1198	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1199	  return;
1200	}
1201#endif
1202      convert_move (to, force_reg (from_mode, from), unsignedp);
1203      return;
1204    }
1205
1206  if (from_mode == TImode && to_mode == HImode)
1207    {
1208#ifdef HAVE_trunctihi2
1209      if (HAVE_trunctihi2)
1210	{
1211	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1212	  return;
1213	}
1214#endif
1215      convert_move (to, force_reg (from_mode, from), unsignedp);
1216      return;
1217    }
1218
1219  if (from_mode == TImode && to_mode == QImode)
1220    {
1221#ifdef HAVE_trunctiqi2
1222      if (HAVE_trunctiqi2)
1223	{
1224	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1225	  return;
1226	}
1227#endif
1228      convert_move (to, force_reg (from_mode, from), unsignedp);
1229      return;
1230    }
1231
1232  /* Handle truncation of volatile memrefs, and so on;
1233     the things that couldn't be truncated directly,
1234     and for which there was no special instruction.  */
1235  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1236    {
1237      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1238      emit_move_insn (to, temp);
1239      return;
1240    }
1241
1242  /* Mode combination is not recognized.  */
1243  abort ();
1244}
1245
1246/* Return an rtx for a value that would result
1247   from converting X to mode MODE.
1248   Both X and MODE may be floating, or both integer.
1249   UNSIGNEDP is nonzero if X is an unsigned value.
1250   This can be done by referring to a part of X in place
1251   or by copying to a new temporary with conversion.
1252
1253   This function *must not* call protect_from_queue
1254   except when putting X into an insn (in which case convert_move does it).  */
1255
1256rtx
1257convert_to_mode (mode, x, unsignedp)
1258     enum machine_mode mode;
1259     rtx x;
1260     int unsignedp;
1261{
1262  return convert_modes (mode, VOIDmode, x, unsignedp);
1263}
1264
1265/* Return an rtx for a value that would result
1266   from converting X from mode OLDMODE to mode MODE.
1267   Both modes may be floating, or both integer.
1268   UNSIGNEDP is nonzero if X is an unsigned value.
1269
1270   This can be done by referring to a part of X in place
1271   or by copying to a new temporary with conversion.
1272
1273   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1274
1275   This function *must not* call protect_from_queue
1276   except when putting X into an insn (in which case convert_move does it).  */
1277
1278rtx
1279convert_modes (mode, oldmode, x, unsignedp)
1280     enum machine_mode mode, oldmode;
1281     rtx x;
1282     int unsignedp;
1283{
1284  rtx temp;
1285
1286  /* If FROM is a SUBREG that indicates that we have already done at least
1287     the required extension, strip it.  */
1288
1289  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1290      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1291      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1292    x = gen_lowpart (mode, x);
1293
1294  if (GET_MODE (x) != VOIDmode)
1295    oldmode = GET_MODE (x);
1296
1297  if (mode == oldmode)
1298    return x;
1299
1300  /* There is one case that we must handle specially: If we are converting
1301     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1302     we are to interpret the constant as unsigned, gen_lowpart will do
1303     the wrong thing if the constant appears negative.  What we want to do is
1304     make the high-order word of the constant zero, not all ones.  */
1305
1306  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1307      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1308      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1309    {
1310      HOST_WIDE_INT val = INTVAL (x);
1311
1312      if (oldmode != VOIDmode
1313	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1314	{
1315	  int width = GET_MODE_BITSIZE (oldmode);
1316
1317	  /* We need to zero extend VAL.  */
1318	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1319	}
1320
1321      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1322    }
1323
1324  /* We can do this with a gen_lowpart if both desired and current modes
1325     are integer, and this is either a constant integer, a register, or a
1326     non-volatile MEM.  Except for the constant case where MODE is no
1327     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1328
1329  if ((GET_CODE (x) == CONST_INT
1330       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1331      || (GET_MODE_CLASS (mode) == MODE_INT
1332	  && GET_MODE_CLASS (oldmode) == MODE_INT
1333	  && (GET_CODE (x) == CONST_DOUBLE
1334	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1335		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1336		       && direct_load[(int) mode])
1337		      || (GET_CODE (x) == REG
1338			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1339						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1340    {
1341      /* ?? If we don't know OLDMODE, we have to assume here that
1342	 X does not need sign- or zero-extension.   This may not be
1343	 the case, but it's the best we can do.  */
1344      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1345	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1346	{
1347	  HOST_WIDE_INT val = INTVAL (x);
1348	  int width = GET_MODE_BITSIZE (oldmode);
1349
1350	  /* We must sign or zero-extend in this case.  Start by
1351	     zero-extending, then sign extend if we need to.  */
1352	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1353	  if (! unsignedp
1354	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1355	    val |= (HOST_WIDE_INT) (-1) << width;
1356
1357	  return GEN_INT (trunc_int_for_mode (val, mode));
1358	}
1359
1360      return gen_lowpart (mode, x);
1361    }
1362
1363  temp = gen_reg_rtx (mode);
1364  convert_move (temp, x, unsignedp);
1365  return temp;
1366}
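
/* Usage sketch, added for exposition (not from the original source): to
   widen a hypothetical QImode register `narrow' into a fresh SImode
   value with sign extension, a caller might write

       rtx wide = convert_to_mode (SImode, narrow, 0);

   passing 1 for UNSIGNEDP would request zero extension instead.
   convert_modes does the same job but lets the caller state OLDMODE
   explicitly, which matters when X is a VOIDmode constant.  */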
1367
1368/* This macro determines the largest unit size that move_by_pieces
1369   can use.  */
1370
1371/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1372   move efficiently, as opposed to MOVE_MAX, which is the maximum
1373   number of bytes we can move with a single instruction.  */
1374
1375#ifndef MOVE_MAX_PIECES
1376#define MOVE_MAX_PIECES   MOVE_MAX
1377#endif
1378
1379/* Generate several move instructions to copy LEN bytes from block FROM to
1380   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1381   and TO through protect_from_queue before calling.
1382
1383   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1384   used to push FROM to the stack.
1385
1386   ALIGN is maximum alignment we can assume.  */
1387
1388void
1389move_by_pieces (to, from, len, align)
1390     rtx to, from;
1391     unsigned HOST_WIDE_INT len;
1392     unsigned int align;
1393{
1394  struct move_by_pieces data;
1395  rtx to_addr, from_addr = XEXP (from, 0);
1396  unsigned int max_size = MOVE_MAX_PIECES + 1;
1397  enum machine_mode mode = VOIDmode, tmode;
1398  enum insn_code icode;
1399
1400  data.offset = 0;
1401  data.from_addr = from_addr;
1402  if (to)
1403    {
1404      to_addr = XEXP (to, 0);
1405      data.to = to;
1406      data.autinc_to
1407	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1408	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1409      data.reverse
1410	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1411    }
1412  else
1413    {
1414      to_addr = NULL_RTX;
1415      data.to = NULL_RTX;
1416      data.autinc_to = 1;
1417#ifdef STACK_GROWS_DOWNWARD
1418      data.reverse = 1;
1419#else
1420      data.reverse = 0;
1421#endif
1422    }
1423  data.to_addr = to_addr;
1424  data.from = from;
1425  data.autinc_from
1426    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1427       || GET_CODE (from_addr) == POST_INC
1428       || GET_CODE (from_addr) == POST_DEC);
1429
1430  data.explicit_inc_from = 0;
1431  data.explicit_inc_to = 0;
1432  if (data.reverse) data.offset = len;
1433  data.len = len;
1434
1435  /* If copying requires more than two move insns,
1436     copy addresses to registers (to make displacements shorter)
1437     and use post-increment if available.  */
1438  if (!(data.autinc_from && data.autinc_to)
1439      && move_by_pieces_ninsns (len, align) > 2)
1440    {
1441      /* Find the mode of the largest move...  */
1442      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1443	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1444	if (GET_MODE_SIZE (tmode) < max_size)
1445	  mode = tmode;
1446
1447      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1448	{
1449	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1450	  data.autinc_from = 1;
1451	  data.explicit_inc_from = -1;
1452	}
1453      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1454	{
1455	  data.from_addr = copy_addr_to_reg (from_addr);
1456	  data.autinc_from = 1;
1457	  data.explicit_inc_from = 1;
1458	}
1459      if (!data.autinc_from && CONSTANT_P (from_addr))
1460	data.from_addr = copy_addr_to_reg (from_addr);
1461      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1462	{
1463	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1464	  data.autinc_to = 1;
1465	  data.explicit_inc_to = -1;
1466	}
1467      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1468	{
1469	  data.to_addr = copy_addr_to_reg (to_addr);
1470	  data.autinc_to = 1;
1471	  data.explicit_inc_to = 1;
1472	}
1473      if (!data.autinc_to && CONSTANT_P (to_addr))
1474	data.to_addr = copy_addr_to_reg (to_addr);
1475    }
1476
1477  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1478      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1479    align = MOVE_MAX * BITS_PER_UNIT;
1480
1481  /* First move what we can in the largest integer mode, then go to
1482     successively smaller modes.  */
1483
1484  while (max_size > 1)
1485    {
1486      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1487	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1488	if (GET_MODE_SIZE (tmode) < max_size)
1489	  mode = tmode;
1490
1491      if (mode == VOIDmode)
1492	break;
1493
1494      icode = mov_optab->handlers[(int) mode].insn_code;
1495      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1496	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1497
1498      max_size = GET_MODE_SIZE (mode);
1499    }
1500
1501  /* The code above should have handled everything.  */
1502  if (data.len > 0)
1503    abort ();
1504}
1505
1506/* Return number of insns required to move L bytes by pieces.
1507   ALIGN (in bits) is maximum alignment we can assume.  */
1508
1509static unsigned HOST_WIDE_INT
1510move_by_pieces_ninsns (l, align)
1511     unsigned HOST_WIDE_INT l;
1512     unsigned int align;
1513{
1514  unsigned HOST_WIDE_INT n_insns = 0;
1515  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1516
1517  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1518      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1519    align = MOVE_MAX * BITS_PER_UNIT;
1520
1521  while (max_size > 1)
1522    {
1523      enum machine_mode mode = VOIDmode, tmode;
1524      enum insn_code icode;
1525
1526      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1527	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1528	if (GET_MODE_SIZE (tmode) < max_size)
1529	  mode = tmode;
1530
1531      if (mode == VOIDmode)
1532	break;
1533
1534      icode = mov_optab->handlers[(int) mode].insn_code;
1535      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1536	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1537
1538      max_size = GET_MODE_SIZE (mode);
1539    }
1540
1541  if (l)
1542    abort ();
1543  return n_insns;
1544}
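
/* Worked example, illustrative only: assuming MOVE_MAX == 4 and an
   alignment of at least one word, L == 11 is counted as two SImode moves
   (8 bytes), one HImode move (2 bytes) and one QImode move (1 byte), so
   the function returns 4.  */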
1545
1546/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1547   with move instructions for mode MODE.  GENFUN is the gen_... function
1548   to make a move insn for that mode.  DATA has all the other info.  */
1549
1550static void
1551move_by_pieces_1 (genfun, mode, data)
1552     rtx (*genfun) PARAMS ((rtx, ...));
1553     enum machine_mode mode;
1554     struct move_by_pieces *data;
1555{
1556  unsigned int size = GET_MODE_SIZE (mode);
1557  rtx to1 = NULL_RTX, from1;
1558
1559  while (data->len >= size)
1560    {
1561      if (data->reverse)
1562	data->offset -= size;
1563
1564      if (data->to)
1565	{
1566	  if (data->autinc_to)
1567	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1568					     data->offset);
1569	  else
1570	    to1 = adjust_address (data->to, mode, data->offset);
1571	}
1572
1573      if (data->autinc_from)
1574	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1575					   data->offset);
1576      else
1577	from1 = adjust_address (data->from, mode, data->offset);
1578
1579      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1580	emit_insn (gen_add2_insn (data->to_addr,
1581				  GEN_INT (-(HOST_WIDE_INT)size)));
1582      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1583	emit_insn (gen_add2_insn (data->from_addr,
1584				  GEN_INT (-(HOST_WIDE_INT)size)));
1585
1586      if (data->to)
1587	emit_insn ((*genfun) (to1, from1));
1588      else
1589	{
1590#ifdef PUSH_ROUNDING
1591	  emit_single_push_insn (mode, from1, NULL);
1592#else
1593	  abort ();
1594#endif
1595	}
1596
1597      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1598	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1599      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1600	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1601
1602      if (! data->reverse)
1603	data->offset += size;
1604
1605      data->len -= size;
1606    }
1607}
1608
1609/* Emit code to move a block Y to a block X.
1610   This may be done with string-move instructions,
1611   with multiple scalar move instructions, or with a library call.
1612
1613   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1614   with mode BLKmode.
1615   SIZE is an rtx that says how long they are.
1616   ALIGN is the maximum alignment we can assume they have.
1617
1618   Return the address of the new block, if memcpy is called and returns it,
1619   0 otherwise.  */
1620
1621rtx
1622emit_block_move (x, y, size)
1623     rtx x, y;
1624     rtx size;
1625{
1626  rtx retval = 0;
1627#ifdef TARGET_MEM_FUNCTIONS
1628  static tree fn;
1629  tree call_expr, arg_list;
1630#endif
1631  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1632
1633  if (GET_MODE (x) != BLKmode)
1634    abort ();
1635
1636  if (GET_MODE (y) != BLKmode)
1637    abort ();
1638
1639  x = protect_from_queue (x, 1);
1640  y = protect_from_queue (y, 0);
1641  size = protect_from_queue (size, 0);
1642
1643  if (GET_CODE (x) != MEM)
1644    abort ();
1645  if (GET_CODE (y) != MEM)
1646    abort ();
1647  if (size == 0)
1648    abort ();
1649
1650  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1651    move_by_pieces (x, y, INTVAL (size), align);
1652  else
1653    {
1654      /* Try the most limited insn first, because there's no point
1655	 including more than one in the machine description unless
1656	 the more limited one has some advantage.  */
1657
1658      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1659      enum machine_mode mode;
1660
1661      /* Since this is a move insn, we don't care about volatility.  */
1662      volatile_ok = 1;
1663
1664      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1665	   mode = GET_MODE_WIDER_MODE (mode))
1666	{
1667	  enum insn_code code = movstr_optab[(int) mode];
1668	  insn_operand_predicate_fn pred;
1669
1670	  if (code != CODE_FOR_nothing
1671	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1672		 here because if SIZE is less than the mode mask, as it is
1673		 returned by the macro, it will definitely be less than the
1674		 actual mode mask.  */
1675	      && ((GET_CODE (size) == CONST_INT
1676		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1677		       <= (GET_MODE_MASK (mode) >> 1)))
1678		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1679	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1680		  || (*pred) (x, BLKmode))
1681	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1682		  || (*pred) (y, BLKmode))
1683	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1684		  || (*pred) (opalign, VOIDmode)))
1685	    {
1686	      rtx op2;
1687	      rtx last = get_last_insn ();
1688	      rtx pat;
1689
1690	      op2 = convert_to_mode (mode, size, 1);
1691	      pred = insn_data[(int) code].operand[2].predicate;
1692	      if (pred != 0 && ! (*pred) (op2, mode))
1693		op2 = copy_to_mode_reg (mode, op2);
1694
1695	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1696	      if (pat)
1697		{
1698		  emit_insn (pat);
1699		  volatile_ok = 0;
1700		  return 0;
1701		}
1702	      else
1703		delete_insns_since (last);
1704	    }
1705	}
1706
1707      volatile_ok = 0;
1708
1709      /* X, Y, or SIZE may have been passed through protect_from_queue.
1710
1711	 It is unsafe to save the value generated by protect_from_queue
1712	 and reuse it later.  Consider what happens if emit_queue is
1713	 called before the return value from protect_from_queue is used.
1714
1715	 Expansion of the CALL_EXPR below will call emit_queue before
1716	 we are finished emitting RTL for argument setup.  So if we are
1717	 not careful we could get the wrong value for an argument.
1718
1719	 To avoid this problem we go ahead and emit code to copy X, Y &
1720	 SIZE into new pseudos.  We can then place those new pseudos
1721	 into an RTL_EXPR and use them later, even after a call to
1722	 emit_queue.
1723
1724	 Note this is not strictly needed for library calls since they
1725	 do not call emit_queue before loading their arguments.  However,
1726	 we may need to have library calls call emit_queue in the future
1727	 since failing to do so could cause problems for targets which
1728	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1729      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1730      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1731
1732#ifdef TARGET_MEM_FUNCTIONS
1733      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1734#else
1735      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1736			      TREE_UNSIGNED (integer_type_node));
1737      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1738#endif
1739
1740#ifdef TARGET_MEM_FUNCTIONS
1741      /* It is incorrect to use the libcall calling conventions to call
1742	 memcpy in this context.
1743
1744	 This could be a user call to memcpy and the user may wish to
1745	 examine the return value from memcpy.
1746
1747	 For targets where libcalls and normal calls have different conventions
1748	 for returning pointers, we could end up generating incorrect code.
1749
1750	 So instead of using a libcall sequence we build up a suitable
1751	 CALL_EXPR and expand the call in the normal fashion.  */
1752      if (fn == NULL_TREE)
1753	{
1754	  tree fntype;
1755
1756	  /* This was copied from except.c; I don't know if all this is
1757	     necessary in this context or not.  */
1758	  fn = get_identifier ("memcpy");
1759	  fntype = build_pointer_type (void_type_node);
1760	  fntype = build_function_type (fntype, NULL_TREE);
1761	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1762	  ggc_add_tree_root (&fn, 1);
1763	  DECL_EXTERNAL (fn) = 1;
1764	  TREE_PUBLIC (fn) = 1;
1765	  DECL_ARTIFICIAL (fn) = 1;
1766	  TREE_NOTHROW (fn) = 1;
1767	  make_decl_rtl (fn, NULL);
1768	  assemble_external (fn);
1769	}
1770
1771      /* We need to make an argument list for the function call.
1772
1773	 memcpy has three arguments, the first two are void * addresses and
1774	 the last is a size_t byte count for the copy.  */
1775      arg_list
1776	= build_tree_list (NULL_TREE,
1777			   make_tree (build_pointer_type (void_type_node), x));
1778      TREE_CHAIN (arg_list)
1779	= build_tree_list (NULL_TREE,
1780			   make_tree (build_pointer_type (void_type_node), y));
1781      TREE_CHAIN (TREE_CHAIN (arg_list))
1782	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1783      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1784
1785      /* Now we have to build up the CALL_EXPR itself.  */
1786      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1787      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1788			 call_expr, arg_list, NULL_TREE);
1789      TREE_SIDE_EFFECTS (call_expr) = 1;
1790
1791      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1792#else
1793      emit_library_call (bcopy_libfunc, LCT_NORMAL,
1794			 VOIDmode, 3, y, Pmode, x, Pmode,
1795			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1796					  TREE_UNSIGNED (integer_type_node)),
1797			 TYPE_MODE (integer_type_node));
1798#endif
1799
1800      /* If we are initializing a readonly value, show the above call
1801	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
1802	 from a loop.  */
1803      if (RTX_UNCHANGING_P (x))
1804	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1805    }
1806
1807  return retval;
1808}
1809
1810/* Copy all or part of a value X into registers starting at REGNO.
1811   The number of registers to be filled is NREGS.  */
1812
1813void
1814move_block_to_reg (regno, x, nregs, mode)
1815     int regno;
1816     rtx x;
1817     int nregs;
1818     enum machine_mode mode;
1819{
1820  int i;
1821#ifdef HAVE_load_multiple
1822  rtx pat;
1823  rtx last;
1824#endif
1825
1826  if (nregs == 0)
1827    return;
1828
1829  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1830    x = validize_mem (force_const_mem (mode, x));
1831
1832  /* See if the machine can do this with a load multiple insn.  */
1833#ifdef HAVE_load_multiple
1834  if (HAVE_load_multiple)
1835    {
1836      last = get_last_insn ();
1837      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1838			       GEN_INT (nregs));
1839      if (pat)
1840	{
1841	  emit_insn (pat);
1842	  return;
1843	}
1844      else
1845	delete_insns_since (last);
1846    }
1847#endif
1848
1849  for (i = 0; i < nregs; i++)
1850    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1851		    operand_subword_force (x, i, mode));
1852}
1853
1854/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1855   The number of registers to be filled is NREGS.  SIZE indicates the number
1856   of bytes in the object X.  */
1857
1858void
1859move_block_from_reg (regno, x, nregs, size)
1860     int regno;
1861     rtx x;
1862     int nregs;
1863     int size;
1864{
1865  int i;
1866#ifdef HAVE_store_multiple
1867  rtx pat;
1868  rtx last;
1869#endif
1870  enum machine_mode mode;
1871
1872  if (nregs == 0)
1873    return;
1874
1875  /* If SIZE is that of a mode no bigger than a word, just use that
1876     mode's store operation.  */
1877  if (size <= UNITS_PER_WORD
1878      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1879      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1880    {
1881      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1882      return;
1883    }
1884
1885  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1886     to the left before storing to memory.  Note that the previous test
1887     doesn't handle all cases (e.g. SIZE == 3).  */
1888  if (size < UNITS_PER_WORD
1889      && BYTES_BIG_ENDIAN
1890      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1891    {
1892      rtx tem = operand_subword (x, 0, 1, BLKmode);
1893      rtx shift;
1894
1895      if (tem == 0)
1896	abort ();
1897
1898      shift = expand_shift (LSHIFT_EXPR, word_mode,
1899			    gen_rtx_REG (word_mode, regno),
1900			    build_int_2 ((UNITS_PER_WORD - size)
1901					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1902      emit_move_insn (tem, shift);
1903      return;
1904    }
1905
1906  /* See if the machine can do this with a store multiple insn.  */
1907#ifdef HAVE_store_multiple
1908  if (HAVE_store_multiple)
1909    {
1910      last = get_last_insn ();
1911      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1912				GEN_INT (nregs));
1913      if (pat)
1914	{
1915	  emit_insn (pat);
1916	  return;
1917	}
1918      else
1919	delete_insns_since (last);
1920    }
1921#endif
1922
1923  for (i = 0; i < nregs; i++)
1924    {
1925      rtx tem = operand_subword (x, i, 1, BLKmode);
1926
1927      if (tem == 0)
1928	abort ();
1929
1930      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1931    }
1932}
1933
1934/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1935   registers represented by a PARALLEL.  SSIZE represents the total size of
1936   block SRC in bytes, or -1 if not known.  */
1937/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1938   the balance will be in what would be the low-order memory addresses, i.e.
1939   left justified for big endian, right justified for little endian.  This
1940   happens to be true for the targets currently using this support.  If this
1941   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1942   would be needed.  */
1943
1944void
1945emit_group_load (dst, orig_src, ssize)
1946     rtx dst, orig_src;
1947     int ssize;
1948{
1949  rtx *tmps, src;
1950  int start, i;
1951
1952  if (GET_CODE (dst) != PARALLEL)
1953    abort ();
1954
1955  /* Check for a NULL entry, used to indicate that the parameter goes
1956     both on the stack and in registers.  */
1957  if (XEXP (XVECEXP (dst, 0, 0), 0))
1958    start = 0;
1959  else
1960    start = 1;
1961
1962  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1963
1964  /* Process the pieces.  */
1965  for (i = start; i < XVECLEN (dst, 0); i++)
1966    {
1967      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1968      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1969      unsigned int bytelen = GET_MODE_SIZE (mode);
1970      int shift = 0;
1971
1972      /* Handle trailing fragments that run over the size of the struct.  */
1973      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1974	{
1975	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1976	  bytelen = ssize - bytepos;
1977	  if (bytelen <= 0)
1978	    abort ();
1979	}
1980
1981      /* If we won't be loading directly from memory, protect the real source
1982	 from strange tricks we might play; but make sure that the source can
1983	 be loaded directly into the destination.  */
1984      src = orig_src;
1985      if (GET_CODE (orig_src) != MEM
1986	  && (!CONSTANT_P (orig_src)
1987	      || (GET_MODE (orig_src) != mode
1988		  && GET_MODE (orig_src) != VOIDmode)))
1989	{
1990	  if (GET_MODE (orig_src) == VOIDmode)
1991	    src = gen_reg_rtx (mode);
1992	  else
1993	    src = gen_reg_rtx (GET_MODE (orig_src));
1994
1995	  emit_move_insn (src, orig_src);
1996	}
1997
1998      /* Optimize the access just a bit.  */
1999      if (GET_CODE (src) == MEM
2000	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2001	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2002	  && bytelen == GET_MODE_SIZE (mode))
2003	{
2004	  tmps[i] = gen_reg_rtx (mode);
2005	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2006	}
2007      else if (GET_CODE (src) == CONCAT)
2008	{
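	  /* SRC is a complex value.  If this piece corresponds exactly to
	     one half of the CONCAT, use that half directly; a piece at
	     offset zero that does not is spilled to a stack temporary and
	     reread from memory; any other layout aborts.  */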
2009	  if ((bytepos == 0
2010	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2012		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2013	    {
2014	      tmps[i] = XEXP (src, bytepos != 0);
2015	      if (! CONSTANT_P (tmps[i])
2016		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2017		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2018					     0, 1, NULL_RTX, mode, mode, ssize);
2019	    }
2020	  else if (bytepos == 0)
2021	    {
2022	      rtx mem = assign_stack_temp (GET_MODE (src),
2023					   GET_MODE_SIZE (GET_MODE (src)), 0);
2024	      emit_move_insn (mem, src);
2025	      tmps[i] = adjust_address (mem, mode, 0);
2026	    }
2027	  else
2028	    abort ();
2029	}
2030      else if (CONSTANT_P (src)
2031	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2032	tmps[i] = src;
2033      else
2034	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2035				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2036				     mode, mode, ssize);
2037
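      /* A trailing fragment was loaded right-justified; on a big-endian
	 target shift it left so the bytes end up in the high-order part
	 of the register, as the layout described above expects.  */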
2038      if (BYTES_BIG_ENDIAN && shift)
2039	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2040		      tmps[i], 0, OPTAB_WIDEN);
2041    }
2042
2043  emit_queue ();
2044
2045  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2046  for (i = start; i < XVECLEN (dst, 0); i++)
2047    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2048}
2049
2050/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2051   registers represented by a PARALLEL.  SSIZE represents the total size of
2052   block DST in bytes, or -1 if not known.  */
2053
2054void
2055emit_group_store (orig_dst, src, ssize)
2056     rtx orig_dst, src;
2057     int ssize;
2058{
2059  rtx *tmps, dst;
2060  int start, i;
2061
2062  if (GET_CODE (src) != PARALLEL)
2063    abort ();
2064
2065  /* Check for a NULL entry, used to indicate that the parameter goes
2066     both on the stack and in registers.  */
2067  if (XEXP (XVECEXP (src, 0, 0), 0))
2068    start = 0;
2069  else
2070    start = 1;
2071
2072  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2073
2074  /* Copy the (probable) hard regs into pseudos.  */
2075  for (i = start; i < XVECLEN (src, 0); i++)
2076    {
2077      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2078      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2079      emit_move_insn (tmps[i], reg);
2080    }
2081  emit_queue ();
2082
2083  /* If we won't be storing directly into memory, protect the real destination
2084     from strange tricks we might play.  */
2085  dst = orig_dst;
2086  if (GET_CODE (dst) == PARALLEL)
2087    {
2088      rtx temp;
2089
2090      /* We can get a PARALLEL dst if there is a conditional expression in
2091	 a return statement.  In that case, the dst and src are the same,
2092	 so no action is necessary.  */
2093      if (rtx_equal_p (dst, src))
2094	return;
2095
2096      /* It is unclear if we can ever reach here, but we may as well handle
2097	 it.  Allocate a temporary, and split this into a store/load to/from
2098	 the temporary.  */
2099
2100      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2101      emit_group_store (temp, src, ssize);
2102      emit_group_load (dst, temp, ssize);
2103      return;
2104    }
2105  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2106    {
2107      dst = gen_reg_rtx (GET_MODE (orig_dst));
2108      /* Make life a bit easier for combine.  */
2109      emit_move_insn (dst, const0_rtx);
2110    }
2111
2112  /* Process the pieces.  */
2113  for (i = start; i < XVECLEN (src, 0); i++)
2114    {
2115      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2116      enum machine_mode mode = GET_MODE (tmps[i]);
2117      unsigned int bytelen = GET_MODE_SIZE (mode);
2118      rtx dest = dst;
2119
2120      /* Handle trailing fragments that run over the size of the struct.  */
2121      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2122	{
2123	  if (BYTES_BIG_ENDIAN)
2124	    {
2125	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2126	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2127			    tmps[i], 0, OPTAB_WIDEN);
2128	    }
2129	  bytelen = ssize - bytepos;
2130	}
2131
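      /* If the destination is a complex value (a CONCAT), pick whichever
	 half this piece falls into and make BYTEPOS relative to that
	 half.  */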
2132      if (GET_CODE (dst) == CONCAT)
2133	{
2134	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2135	    dest = XEXP (dst, 0);
2136	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2137	    {
2138	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2139	      dest = XEXP (dst, 1);
2140	    }
2141	  else
2142	    abort ();
2143	}
2144
2145      /* Optimize the access just a bit.  */
2146      if (GET_CODE (dest) == MEM
2147	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2148	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2149	  && bytelen == GET_MODE_SIZE (mode))
2150	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2151      else
2152	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2153			 mode, tmps[i], ssize);
2154    }
2155
2156  emit_queue ();
2157
2158  /* Copy from the pseudo into the (probable) hard reg.  */
2159  if (GET_CODE (dst) == REG)
2160    emit_move_insn (orig_dst, dst);
2161}
2162
2163/* Generate code to copy a BLKmode object of TYPE out of a
2164   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2165   is null, a stack temporary is created.  TGTBLK is returned.
2166
2167   The primary purpose of this routine is to handle functions
2168   that return BLKmode structures in registers.  Some machines
2169   (the PA for example) want to return all small structures
2170   in registers regardless of the structure's alignment.  */
2171
2172rtx
2173copy_blkmode_from_reg (tgtblk, srcreg, type)
2174     rtx tgtblk;
2175     rtx srcreg;
2176     tree type;
2177{
2178  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2179  rtx src = NULL, dst = NULL;
2180  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2181  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2182
2183  if (tgtblk == 0)
2184    {
2185      tgtblk = assign_temp (build_qualified_type (type,
2186						  (TYPE_QUALS (type)
2187						   | TYPE_QUAL_CONST)),
2188			    0, 1, 1);
2189      preserve_temp_slots (tgtblk);
2190    }
2191
2192  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2193     into a new pseudo which is a full word.
2194
2195     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2196     the wrong part of the register gets copied, so we fake a type conversion
2197     in place.  */
2198  if (GET_MODE (srcreg) != BLKmode
2199      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2200    {
2201      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2202	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2203      else
2204	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2205    }
2206
2207  /* Structures whose size is not a multiple of a word are aligned
2208     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2209     machine, this means we must skip the empty high order bytes when
2210     calculating the bit offset.  */
2211  if (BYTES_BIG_ENDIAN
2212      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2213      && bytes % UNITS_PER_WORD)
2214    big_endian_correction
2215      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2216
2217	  /* Copy the structure BITSIZE bits at a time.
2218
2219     We could probably emit more efficient code for machines which do not use
2220     strict alignment, but it doesn't seem worth the effort at the current
2221     time.  */
2222  for (bitpos = 0, xbitpos = big_endian_correction;
2223       bitpos < bytes * BITS_PER_UNIT;
2224       bitpos += bitsize, xbitpos += bitsize)
2225    {
2226      /* We need a new source operand each time xbitpos is on a
2227	 word boundary or when xbitpos == big_endian_correction
2228	 (the first time through).  */
2229      if (xbitpos % BITS_PER_WORD == 0
2230	  || xbitpos == big_endian_correction)
2231	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2232				     GET_MODE (srcreg));
2233
2234      /* We need a new destination operand each time bitpos is on
2235	 a word boundary.  */
2236      if (bitpos % BITS_PER_WORD == 0)
2237	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2238
2239      /* Use xbitpos for the source extraction (right justified) and
2240	 bitpos for the destination store (left justified).  */
2241      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2242		       extract_bit_field (src, bitsize,
2243					  xbitpos % BITS_PER_WORD, 1,
2244					  NULL_RTX, word_mode, word_mode,
2245					  BITS_PER_WORD),
2246		       BITS_PER_WORD);
2247    }
2248
2249  return tgtblk;
2250}
2251
2252/* Add a USE expression for REG to the (possibly empty) list pointed
2253   to by CALL_FUSAGE.  REG must denote a hard register.  */
2254
2255void
2256use_reg (call_fusage, reg)
2257     rtx *call_fusage, reg;
2258{
2259  if (GET_CODE (reg) != REG
2260      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2261    abort ();
2262
2263  *call_fusage
2264    = gen_rtx_EXPR_LIST (VOIDmode,
2265			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2266}
2267
2268/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2269   starting at REGNO.  All of these registers must be hard registers.  */
2270
2271void
2272use_regs (call_fusage, regno, nregs)
2273     rtx *call_fusage;
2274     int regno;
2275     int nregs;
2276{
2277  int i;
2278
2279  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2280    abort ();
2281
2282  for (i = 0; i < nregs; i++)
2283    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2284}
2285
2286/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2287   PARALLEL REGS.  This is for calls that pass values in multiple
2288   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2289
2290void
2291use_group_regs (call_fusage, regs)
2292     rtx *call_fusage;
2293     rtx regs;
2294{
2295  int i;
2296
2297  for (i = 0; i < XVECLEN (regs, 0); i++)
2298    {
2299      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2300
2301      /* A NULL entry means the parameter goes both on the stack and in
2302	 registers.  This can also be a MEM for targets that pass values
2303	 partially on the stack and partially in registers.  */
2304      if (reg != 0 && GET_CODE (reg) == REG)
2305	use_reg (call_fusage, reg);
2306    }
2307}
2308
2309
2310int
2311can_store_by_pieces (len, constfun, constfundata, align)
2312     unsigned HOST_WIDE_INT len;
2313     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2314     PTR constfundata;
2315     unsigned int align;
2316{
2317  unsigned HOST_WIDE_INT max_size, l;
2318  HOST_WIDE_INT offset = 0;
2319  enum machine_mode mode, tmode;
2320  enum insn_code icode;
2321  int reverse;
2322  rtx cst;
2323
2324  if (! MOVE_BY_PIECES_P (len, align))
2325    return 0;
2326
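  /* If unaligned word accesses are not slow, or ALIGN is already beyond
     what any move instruction can exploit, treat the block as having the
     maximum useful alignment when choosing modes below.  */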
2327  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2328      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2329    align = MOVE_MAX * BITS_PER_UNIT;
2330
2331  /* We would first store what we can in the largest integer mode, then go to
2332     successively smaller modes.  */
2333
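  /* Check both traversal orders; a reverse pass matters only when the
     target has auto-decrement addressing, since store_by_pieces may then
     walk the block from the end.  */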
2334  for (reverse = 0;
2335       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2336       reverse++)
2337    {
2338      l = len;
2339      mode = VOIDmode;
2340      max_size = MOVE_MAX_PIECES + 1;
2341      while (max_size > 1)
2342	{
2343	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2344	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2345	    if (GET_MODE_SIZE (tmode) < max_size)
2346	      mode = tmode;
2347
2348	  if (mode == VOIDmode)
2349	    break;
2350
2351	  icode = mov_optab->handlers[(int) mode].insn_code;
2352	  if (icode != CODE_FOR_nothing
2353	      && align >= GET_MODE_ALIGNMENT (mode))
2354	    {
2355	      unsigned int size = GET_MODE_SIZE (mode);
2356
2357	      while (l >= size)
2358		{
2359		  if (reverse)
2360		    offset -= size;
2361
2362		  cst = (*constfun) (constfundata, offset, mode);
2363		  if (!LEGITIMATE_CONSTANT_P (cst))
2364		    return 0;
2365
2366		  if (!reverse)
2367		    offset += size;
2368
2369		  l -= size;
2370		}
2371	    }
2372
2373	  max_size = GET_MODE_SIZE (mode);
2374	}
2375
2376      /* The code above should have handled everything.  */
2377      if (l != 0)
2378	abort ();
2379    }
2380
2381  return 1;
2382}
2383
2384/* Generate several move instructions to store LEN bytes generated by
2385   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2386   pointer which will be passed as argument in every CONSTFUN call.
2387   ALIGN is maximum alignment we can assume.  */
2388
2389void
2390store_by_pieces (to, len, constfun, constfundata, align)
2391     rtx to;
2392     unsigned HOST_WIDE_INT len;
2393     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2394     PTR constfundata;
2395     unsigned int align;
2396{
2397  struct store_by_pieces data;
2398
2399  if (! MOVE_BY_PIECES_P (len, align))
2400    abort ();
2401  to = protect_from_queue (to, 1);
2402  data.constfun = constfun;
2403  data.constfundata = constfundata;
2404  data.len = len;
2405  data.to = to;
2406  store_by_pieces_1 (&data, align);
2407}
2408
2409/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2410   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2411   before calling. ALIGN is maximum alignment we can assume.  */
2412   before calling.  ALIGN is maximum alignment we can assume.  */
2413static void
2414clear_by_pieces (to, len, align)
2415     rtx to;
2416     unsigned HOST_WIDE_INT len;
2417     unsigned int align;
2418{
2419  struct store_by_pieces data;
2420
2421  data.constfun = clear_by_pieces_1;
2422  data.constfundata = NULL;
2423  data.len = len;
2424  data.to = to;
2425  store_by_pieces_1 (&data, align);
2426}
2427
2428/* Callback routine for clear_by_pieces.
2429   Return const0_rtx unconditionally.  */
2430
2431static rtx
2432clear_by_pieces_1 (data, offset, mode)
2433     PTR data ATTRIBUTE_UNUSED;
2434     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2435     enum machine_mode mode ATTRIBUTE_UNUSED;
2436{
2437  return const0_rtx;
2438}
2439
2440/* Subroutine of clear_by_pieces and store_by_pieces.
2441   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2442   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2443   before calling.  ALIGN is maximum alignment we can assume.  */
2444
2445static void
2446store_by_pieces_1 (data, align)
2447     struct store_by_pieces *data;
2448     unsigned int align;
2449{
2450  rtx to_addr = XEXP (data->to, 0);
2451  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2452  enum machine_mode mode = VOIDmode, tmode;
2453  enum insn_code icode;
2454
2455  data->offset = 0;
2456  data->to_addr = to_addr;
2457  data->autinc_to
2458    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2459       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2460
2461  data->explicit_inc_to = 0;
2462  data->reverse
2463    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2464  if (data->reverse)
2465    data->offset = data->len;
2466
2467  /* If storing requires more than two move insns,
2468     copy addresses to registers (to make displacements shorter)
2469     and use post-increment if available.  */
2470  if (!data->autinc_to
2471      && move_by_pieces_ninsns (data->len, align) > 2)
2472    {
2473      /* Determine the main mode we'll be using.  */
2474      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2475	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2476	if (GET_MODE_SIZE (tmode) < max_size)
2477	  mode = tmode;
2478
2479      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2480	{
2481	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2482	  data->autinc_to = 1;
2483	  data->explicit_inc_to = -1;
2484	}
2485
2486      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2487	  && ! data->autinc_to)
2488	{
2489	  data->to_addr = copy_addr_to_reg (to_addr);
2490	  data->autinc_to = 1;
2491	  data->explicit_inc_to = 1;
2492	}
2493
2494      if (! data->autinc_to && CONSTANT_P (to_addr))
2495	data->to_addr = copy_addr_to_reg (to_addr);
2496    }
2497
2498  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2499      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2500    align = MOVE_MAX * BITS_PER_UNIT;
2501
2502  /* First store what we can in the largest integer mode, then go to
2503     successively smaller modes.  */
2504
2505  while (max_size > 1)
2506    {
2507      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2508	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2509	if (GET_MODE_SIZE (tmode) < max_size)
2510	  mode = tmode;
2511
2512      if (mode == VOIDmode)
2513	break;
2514
2515      icode = mov_optab->handlers[(int) mode].insn_code;
2516      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2517	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2518
2519      max_size = GET_MODE_SIZE (mode);
2520    }
2521
2522  /* The code above should have handled everything.  */
2523  if (data->len != 0)
2524    abort ();
2525}
2526
2527/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2528   with move instructions for mode MODE.  GENFUN is the gen_... function
2529   to make a move insn for that mode.  DATA has all the other info.  */
2530
2531static void
2532store_by_pieces_2 (genfun, mode, data)
2533     rtx (*genfun) PARAMS ((rtx, ...));
2534     enum machine_mode mode;
2535     struct store_by_pieces *data;
2536{
2537  unsigned int size = GET_MODE_SIZE (mode);
2538  rtx to1, cst;
2539
2540  while (data->len >= size)
2541    {
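      /* Emit one MODE-sized store per iteration.  When the address
	 register must be stepped explicitly (explicit_inc_to), the
	 adjustment is emitted by hand before (pre-decrement) or after
	 (post-increment) the store.  */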
2542      if (data->reverse)
2543	data->offset -= size;
2544
2545      if (data->autinc_to)
2546	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2547					 data->offset);
2548      else
2549	to1 = adjust_address (data->to, mode, data->offset);
2550
2551      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2552	emit_insn (gen_add2_insn (data->to_addr,
2553				  GEN_INT (-(HOST_WIDE_INT) size)));
2554
2555      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2556      emit_insn ((*genfun) (to1, cst));
2557
2558      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2559	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2560
2561      if (! data->reverse)
2562	data->offset += size;
2563
2564      data->len -= size;
2565    }
2566}
2567
2568/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2569   its length in bytes.  */
2570
2571rtx
2572clear_storage (object, size)
2573     rtx object;
2574     rtx size;
2575{
2576#ifdef TARGET_MEM_FUNCTIONS
2577  static tree fn;
2578  tree call_expr, arg_list;
2579#endif
2580  rtx retval = 0;
2581  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2582			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2583
2584  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2585     just move a zero.  Otherwise, do this a piece at a time.  */
2586  if (GET_MODE (object) != BLKmode
2587      && GET_CODE (size) == CONST_INT
2588      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2589    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2590  else
2591    {
2592      object = protect_from_queue (object, 1);
2593      size = protect_from_queue (size, 0);
2594
2595      if (GET_CODE (size) == CONST_INT
2596	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2597	clear_by_pieces (object, INTVAL (size), align);
2598      else
2599	{
2600	  /* Try the most limited insn first, because there's no point
2601	     including more than one in the machine description unless
2602	     the more limited one has some advantage.  */
2603
2604	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2605	  enum machine_mode mode;
2606
2607	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2608	       mode = GET_MODE_WIDER_MODE (mode))
2609	    {
2610	      enum insn_code code = clrstr_optab[(int) mode];
2611	      insn_operand_predicate_fn pred;
2612
2613	      if (code != CODE_FOR_nothing
2614		  /* We don't need MODE to be narrower than
2615		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2616		     the mode mask, as it is returned by the macro, it will
2617		     definitely be less than the actual mode mask.  */
2618		  && ((GET_CODE (size) == CONST_INT
2619		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2620			   <= (GET_MODE_MASK (mode) >> 1)))
2621		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2622		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2623		      || (*pred) (object, BLKmode))
2624		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2625		      || (*pred) (opalign, VOIDmode)))
2626		{
2627		  rtx op1;
2628		  rtx last = get_last_insn ();
2629		  rtx pat;
2630
2631		  op1 = convert_to_mode (mode, size, 1);
2632		  pred = insn_data[(int) code].operand[1].predicate;
2633		  if (pred != 0 && ! (*pred) (op1, mode))
2634		    op1 = copy_to_mode_reg (mode, op1);
2635
2636		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2637		  if (pat)
2638		    {
2639		      emit_insn (pat);
2640		      return 0;
2641		    }
2642		  else
2643		    delete_insns_since (last);
2644		}
2645	    }
2646
2647	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2648
2649	     It is unsafe to save the value generated by protect_from_queue
2650	     and reuse it later.  Consider what happens if emit_queue is
2651	     called before the return value from protect_from_queue is used.
2652
2653	     Expansion of the CALL_EXPR below will call emit_queue before
2654	     we are finished emitting RTL for argument setup.  So if we are
2655	     not careful we could get the wrong value for an argument.
2656
2657	     To avoid this problem we go ahead and emit code to copy OBJECT
2658	     and SIZE into new pseudos.  We can then place those new pseudos
2659	     into an RTL_EXPR and use them later, even after a call to
2660	     emit_queue.
2661
2662	     Note this is not strictly needed for library calls since they
2663	     do not call emit_queue before loading their arguments.  However,
2664	     we may need to have library calls call emit_queue in the future
2665	     since failing to do so could cause problems for targets which
2666	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2667	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2668
2669#ifdef TARGET_MEM_FUNCTIONS
2670	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2671#else
2672	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2673				  TREE_UNSIGNED (integer_type_node));
2674	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2675#endif
2676
2677#ifdef TARGET_MEM_FUNCTIONS
2678	  /* It is incorrect to use the libcall calling conventions to call
2679	     memset in this context.
2680
2681	     This could be a user call to memset and the user may wish to
2682	     examine the return value from memset.
2683
2684	     For targets where libcalls and normal calls have different
2685	     conventions for returning pointers, we could end up generating
2686	     incorrect code.
2687
2688	     So instead of using a libcall sequence we build up a suitable
2689	     CALL_EXPR and expand the call in the normal fashion.  */
2690	  if (fn == NULL_TREE)
2691	    {
2692	      tree fntype;
2693
2694	      /* This was copied from except.c; I don't know whether all of it is
2695		 necessary in this context.  */
2696	      fn = get_identifier ("memset");
2697	      fntype = build_pointer_type (void_type_node);
2698	      fntype = build_function_type (fntype, NULL_TREE);
2699	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2700	      ggc_add_tree_root (&fn, 1);
2701	      DECL_EXTERNAL (fn) = 1;
2702	      TREE_PUBLIC (fn) = 1;
2703	      DECL_ARTIFICIAL (fn) = 1;
2704	      TREE_NOTHROW (fn) = 1;
2705	      make_decl_rtl (fn, NULL);
2706	      assemble_external (fn);
2707	    }
2708
2709	  /* We need to make an argument list for the function call.
2710
2711	     memset has three arguments, the first is a void * addresses, the
2712	     memset has three arguments: the first is a void * address, the
2713	     second is an integer with the initialization value, and the last
2714	     is a size_t byte count for the fill.  */
2715	    = build_tree_list (NULL_TREE,
2716			       make_tree (build_pointer_type (void_type_node),
2717					  object));
2718	  TREE_CHAIN (arg_list)
2719	    = build_tree_list (NULL_TREE,
2720			       make_tree (integer_type_node, const0_rtx));
2721	  TREE_CHAIN (TREE_CHAIN (arg_list))
2722	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2723	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2724
2725	  /* Now we have to build up the CALL_EXPR itself.  */
2726	  call_expr = build1 (ADDR_EXPR,
2727			      build_pointer_type (TREE_TYPE (fn)), fn);
2728	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2729			     call_expr, arg_list, NULL_TREE);
2730	  TREE_SIDE_EFFECTS (call_expr) = 1;
2731
2732	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2733#else
2734	  emit_library_call (bzero_libfunc, LCT_NORMAL,
2735			     VOIDmode, 2, object, Pmode, size,
2736			     TYPE_MODE (integer_type_node));
2737#endif
2738
2739	  /* If we are initializing a readonly value, show the above call
2740	     clobbered it.  Otherwise, a load from it may erroneously be
2741	     hoisted from a loop.  */
2742	  if (RTX_UNCHANGING_P (object))
2743	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2744	}
2745    }
2746
2747  return retval;
2748}
2749
2750/* Generate code to copy Y into X.
2751   Both Y and X must have the same mode, except that
2752   Y can be a constant with VOIDmode.
2753   This mode cannot be BLKmode; use emit_block_move for that.
2754
2755   Return the last instruction emitted.  */
2756
2757rtx
2758emit_move_insn (x, y)
2759     rtx x, y;
2760{
2761  enum machine_mode mode = GET_MODE (x);
2762  rtx y_cst = NULL_RTX;
2763  rtx last_insn;
2764
2765  x = protect_from_queue (x, 1);
2766  y = protect_from_queue (y, 0);
2767
2768  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2769    abort ();
2770
2771  /* Never force constant_p_rtx to memory.  */
2772  if (GET_CODE (y) == CONSTANT_P_RTX)
2773    ;
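  /* If Y is a constant the target cannot accept directly, force it into
     the constant pool, remembering the original value so a REG_EQUAL note
     can be attached to the final move below.  */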
2774  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2775    {
2776      y_cst = y;
2777      y = force_const_mem (mode, y);
2778    }
2779
2780  /* If X or Y are memory references, verify that their addresses are valid
2781     for the machine.  */
2782  if (GET_CODE (x) == MEM
2783      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2784	   && ! push_operand (x, GET_MODE (x)))
2785	  || (flag_force_addr
2786	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2787    x = validize_mem (x);
2788
2789  if (GET_CODE (y) == MEM
2790      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2791	  || (flag_force_addr
2792	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2793    y = validize_mem (y);
2794
2795  if (mode == BLKmode)
2796    abort ();
2797
2798  last_insn = emit_move_insn_1 (x, y);
2799
2800  if (y_cst && GET_CODE (x) == REG)
2801    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2802
2803  return last_insn;
2804}
2805
2806/* Low level part of emit_move_insn.
2807   Called just like emit_move_insn, but assumes X and Y
2808   are basically valid.  */
2809
2810rtx
2811emit_move_insn_1 (x, y)
2812     rtx x, y;
2813{
2814  enum machine_mode mode = GET_MODE (x);
2815  enum machine_mode submode;
2816  enum mode_class class = GET_MODE_CLASS (mode);
2817
2818  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2819    abort ();
2820
2821  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2822    return
2823      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2824
2825  /* Expand complex moves by moving real part and imag part, if possible.  */
2826  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2827	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2828						    * BITS_PER_UNIT),
2829						   (class == MODE_COMPLEX_INT
2830						    ? MODE_INT : MODE_FLOAT),
2831						   0))
2832	   && (mov_optab->handlers[(int) submode].insn_code
2833	       != CODE_FOR_nothing))
2834    {
2835      /* Don't split destination if it is a stack push.  */
2836      int stack = push_operand (x, GET_MODE (x));
2837
2838#ifdef PUSH_ROUNDING
2839      /* In case we output to the stack, but the size is smaller than the
2840	 machine can push exactly, we need to use move instructions.  */
2841      if (stack
2842	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2843	      != GET_MODE_SIZE (submode)))
2844	{
2845	  rtx temp;
2846	  HOST_WIDE_INT offset1, offset2;
2847
2848	  /* Do not use anti_adjust_stack, since we don't want to update
2849	     stack_pointer_delta.  */
2850	  temp = expand_binop (Pmode,
2851#ifdef STACK_GROWS_DOWNWARD
2852			       sub_optab,
2853#else
2854			       add_optab,
2855#endif
2856			       stack_pointer_rtx,
2857			       GEN_INT
2858				 (PUSH_ROUNDING
2859				  (GET_MODE_SIZE (GET_MODE (x)))),
2860			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2861
2862	  if (temp != stack_pointer_rtx)
2863	    emit_move_insn (stack_pointer_rtx, temp);
2864
2865#ifdef STACK_GROWS_DOWNWARD
2866	  offset1 = 0;
2867	  offset2 = GET_MODE_SIZE (submode);
2868#else
2869	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2870	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2871		     + GET_MODE_SIZE (submode));
2872#endif
2873
2874	  emit_move_insn (change_address (x, submode,
2875					  gen_rtx_PLUS (Pmode,
2876						        stack_pointer_rtx,
2877							GEN_INT (offset1))),
2878			  gen_realpart (submode, y));
2879	  emit_move_insn (change_address (x, submode,
2880					  gen_rtx_PLUS (Pmode,
2881						        stack_pointer_rtx,
2882							GEN_INT (offset2))),
2883			  gen_imagpart (submode, y));
2884	}
2885      else
2886#endif
2887      /* If this is a stack push, push the highpart first, so it
2888	 will be in the argument order.
2889
2890	 In that case, change_address is used only to convert
2891	 the mode, not to change the address.  */
2892      if (stack)
2893	{
2894	  /* Note that the real part always precedes the imag part in memory
2895	     regardless of the machine's endianness.  */
2896#ifdef STACK_GROWS_DOWNWARD
2897	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2898		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2899		      gen_imagpart (submode, y)));
2900	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2901		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2902		      gen_realpart (submode, y)));
2903#else
2904	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2905		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2906		      gen_realpart (submode, y)));
2907	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2908		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2909		      gen_imagpart (submode, y)));
2910#endif
2911	}
2912      else
2913	{
2914	  rtx realpart_x, realpart_y;
2915	  rtx imagpart_x, imagpart_y;
2916
2917	  /* If this is a complex value with each part being smaller than a
2918	     word, the usual calling sequence will likely pack the pieces into
2919	     a single register.  Unfortunately, SUBREG of hard registers only
2920	     deals in terms of words, so we have a problem converting input
2921	     arguments to the CONCAT of two registers that is used elsewhere
2922	     for complex values.  If this is before reload, we can copy it into
2923	     memory and reload.  FIXME, we should see about using extract and
2924	     insert on integer registers, but complex short and complex char
2925	     variables should be rarely used.  */
2926	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2927	      && (reload_in_progress | reload_completed) == 0)
2928	    {
2929	      int packed_dest_p
2930		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2931	      int packed_src_p
2932		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2933
2934	      if (packed_dest_p || packed_src_p)
2935		{
2936		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2937					       ? MODE_FLOAT : MODE_INT);
2938
2939		  enum machine_mode reg_mode
2940		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2941
2942		  if (reg_mode != BLKmode)
2943		    {
2944		      rtx mem = assign_stack_temp (reg_mode,
2945						   GET_MODE_SIZE (mode), 0);
2946		      rtx cmem = adjust_address (mem, mode, 0);
2947
2948		      cfun->cannot_inline
2949			= N_("function using short complex types cannot be inline");
2950
2951		      if (packed_dest_p)
2952			{
2953			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2954
2955			  emit_move_insn_1 (cmem, y);
2956			  return emit_move_insn_1 (sreg, mem);
2957			}
2958		      else
2959			{
2960			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2961
2962			  emit_move_insn_1 (mem, sreg);
2963			  return emit_move_insn_1 (x, cmem);
2964			}
2965		    }
2966		}
2967	    }
2968
2969	  realpart_x = gen_realpart (submode, x);
2970	  realpart_y = gen_realpart (submode, y);
2971	  imagpart_x = gen_imagpart (submode, x);
2972	  imagpart_y = gen_imagpart (submode, y);
2973
2974	  /* Show the output dies here.  This is necessary for SUBREGs
2975	     of pseudos since we cannot track their lifetimes correctly;
2976	     hard regs shouldn't appear here except as return values.
2977	     We never want to emit such a clobber after reload.  */
2978	  if (x != y
2979	      && ! (reload_in_progress || reload_completed)
2980	      && (GET_CODE (realpart_x) == SUBREG
2981		  || GET_CODE (imagpart_x) == SUBREG))
2982	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2983
2984	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2985		     (realpart_x, realpart_y));
2986	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2987		     (imagpart_x, imagpart_y));
2988	}
2989
2990      return get_last_insn ();
2991    }
2992
2993  /* This will handle any multi-word mode that lacks a move_insn pattern.
2994     However, you will get better code if you define such patterns,
2995     even if they must turn into multiple assembler instructions.  */
2996  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2997    {
2998      rtx last_insn = 0;
2999      rtx seq, inner;
3000      int need_clobber;
3001      int i;
3002
3003#ifdef PUSH_ROUNDING
3004
3005      /* If X is a push on the stack, do the push now and replace
3006	 X with a reference to the stack pointer.  */
3007      if (push_operand (x, GET_MODE (x)))
3008	{
3009	  rtx temp;
3010	  enum rtx_code code;
3011
3012	  /* Do not use anti_adjust_stack, since we don't want to update
3013	     stack_pointer_delta.  */
3014	  temp = expand_binop (Pmode,
3015#ifdef STACK_GROWS_DOWNWARD
3016			       sub_optab,
3017#else
3018			       add_optab,
3019#endif
3020			       stack_pointer_rtx,
3021			       GEN_INT
3022				 (PUSH_ROUNDING
3023				  (GET_MODE_SIZE (GET_MODE (x)))),
3024			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3025
3026          if (temp != stack_pointer_rtx)
3027            emit_move_insn (stack_pointer_rtx, temp);
3028
3029	  code = GET_CODE (XEXP (x, 0));
3030
3031	  /* Just hope that small offsets off SP are OK.  */
3032	  if (code == POST_INC)
3033	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3034				GEN_INT (-((HOST_WIDE_INT)
3035					   GET_MODE_SIZE (GET_MODE (x)))));
3036	  else if (code == POST_DEC)
3037	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3038				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3039	  else
3040	    temp = stack_pointer_rtx;
3041
3042	  x = change_address (x, VOIDmode, temp);
3043	}
3044#endif
3045
3046      /* If we are in reload, see if either operand is a MEM whose address
3047	 is scheduled for replacement.  */
3048      if (reload_in_progress && GET_CODE (x) == MEM
3049	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3050	x = replace_equiv_address_nv (x, inner);
3051      if (reload_in_progress && GET_CODE (y) == MEM
3052	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3053	y = replace_equiv_address_nv (y, inner);
3054
3055      start_sequence ();
3056
3057      need_clobber = 0;
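      /* Move the value one word at a time, collecting the partial moves
	 in a sequence so that a CLOBBER of the destination can be emitted
	 ahead of them when one is needed.  */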
3058      for (i = 0;
3059	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3060	   i++)
3061	{
3062	  rtx xpart = operand_subword (x, i, 1, mode);
3063	  rtx ypart = operand_subword (y, i, 1, mode);
3064
3065	  /* If we can't get a part of Y, put Y into memory if it is a
3066	     constant.  Otherwise, force it into a register.  If we still
3067	     can't get a part of Y, abort.  */
3068	  if (ypart == 0 && CONSTANT_P (y))
3069	    {
3070	      y = force_const_mem (mode, y);
3071	      ypart = operand_subword (y, i, 1, mode);
3072	    }
3073	  else if (ypart == 0)
3074	    ypart = operand_subword_force (y, i, mode);
3075
3076	  if (xpart == 0 || ypart == 0)
3077	    abort ();
3078
3079	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3080
3081	  last_insn = emit_move_insn (xpart, ypart);
3082	}
3083
3084      seq = gen_sequence ();
3085      end_sequence ();
3086
3087      /* Show the output dies here.  This is necessary for SUBREGs
3088	 of pseudos since we cannot track their lifetimes correctly;
3089	 hard regs shouldn't appear here except as return values.
3090	 We never want to emit such a clobber after reload.  */
3091      if (x != y
3092	  && ! (reload_in_progress || reload_completed)
3093	  && need_clobber != 0)
3094	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3095
3096      emit_insn (seq);
3097
3098      return last_insn;
3099    }
3100  else
3101    abort ();
3102}
3103
3104/* Pushing data onto the stack.  */
3105
3106/* Push a block of length SIZE (perhaps variable)
3107   and return an rtx to address the beginning of the block.
3108   Note that it is not possible for the value returned to be a QUEUED.
3109   The value may be virtual_outgoing_args_rtx.
3110
3111   EXTRA is the number of bytes of padding to push in addition to SIZE.
3112   BELOW nonzero means this padding comes at low addresses;
3113   otherwise, the padding comes at high addresses.  */
3114
3115rtx
3116push_block (size, extra, below)
3117     rtx size;
3118     int extra, below;
3119{
3120  rtx temp;
3121
3122  size = convert_modes (Pmode, ptr_mode, size, 1);
3123  if (CONSTANT_P (size))
3124    anti_adjust_stack (plus_constant (size, extra));
3125  else if (GET_CODE (size) == REG && extra == 0)
3126    anti_adjust_stack (size);
3127  else
3128    {
3129      temp = copy_to_mode_reg (Pmode, size);
3130      if (extra != 0)
3131	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3132			     temp, 0, OPTAB_LIB_WIDEN);
3133      anti_adjust_stack (temp);
3134    }
3135
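  /* Written as if (0) / if (1) rather than #ifdef-ing the whole statement,
     presumably so that both arms are always parsed.  */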
3136#ifndef STACK_GROWS_DOWNWARD
3137  if (0)
3138#else
3139  if (1)
3140#endif
3141    {
3142      temp = virtual_outgoing_args_rtx;
3143      if (extra != 0 && below)
3144	temp = plus_constant (temp, extra);
3145    }
3146  else
3147    {
3148      if (GET_CODE (size) == CONST_INT)
3149	temp = plus_constant (virtual_outgoing_args_rtx,
3150			      -INTVAL (size) - (below ? 0 : extra));
3151      else if (extra != 0 && !below)
3152	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3153			     negate_rtx (Pmode, plus_constant (size, extra)));
3154      else
3155	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3156			     negate_rtx (Pmode, size));
3157    }
3158
3159  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3160}
3161
3162#ifdef PUSH_ROUNDING
3163
3164/* Emit single push insn.  */
3165
3166static void
3167emit_single_push_insn (mode, x, type)
3168     rtx x;
3169     enum machine_mode mode;
3170     tree type;
3171{
3172  rtx dest_addr;
3173  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3174  rtx dest;
3175  enum insn_code icode;
3176  insn_operand_predicate_fn pred;
3177
3178  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3179  /* If there is a push pattern, use it.  Otherwise fall back to the old way
3180     of handing a MEM that represents the push operation to the move expander.  */
3181  icode = push_optab->handlers[(int) mode].insn_code;
3182  if (icode != CODE_FOR_nothing)
3183    {
3184      if (((pred = insn_data[(int) icode].operand[0].predicate)
3185	   && !((*pred) (x, mode))))
3186	x = force_reg (mode, x);
3187      emit_insn (GEN_FCN (icode) (x));
3188      return;
3189    }
3190  if (GET_MODE_SIZE (mode) == rounded_size)
3191    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3192  else
3193    {
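      /* The push must be rounded up to ROUNDED_SIZE, so express the stack
	 pointer update explicitly with a PRE_MODIFY address rather than
	 the simple STACK_PUSH_CODE form used above.  */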
3194#ifdef STACK_GROWS_DOWNWARD
3195      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3196				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3197#else
3198      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3199				GEN_INT (rounded_size));
3200#endif
3201      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3202    }
3203
3204  dest = gen_rtx_MEM (mode, dest_addr);
3205
3206  if (type != 0)
3207    {
3208      set_mem_attributes (dest, type, 1);
3209
3210      if (flag_optimize_sibling_calls)
3211	/* Function incoming arguments may overlap with sibling call
3212	   outgoing arguments and we cannot allow reordering of reads
3213	   from function arguments with stores to outgoing arguments
3214	   of sibling calls.  */
3215	set_mem_alias_set (dest, 0);
3216    }
3217  emit_move_insn (dest, x);
3218}
3219#endif
3220
3221/* Generate code to push X onto the stack, assuming it has mode MODE and
3222   type TYPE.
3223   MODE is redundant except when X is a CONST_INT (since they don't
3224   carry mode info).
3225   SIZE is an rtx for the size of data to be copied (in bytes),
3226   needed only if X is BLKmode.
3227
3228   ALIGN (in bits) is maximum alignment we can assume.
3229
3230   If PARTIAL and REG are both nonzero, then copy that many of the first
3231   words of X into registers starting with REG, and push the rest of X.
3232   The amount of space pushed is decreased by PARTIAL words,
3233   rounded *down* to a multiple of PARM_BOUNDARY.
3234   REG must be a hard register in this case.
3235   If REG is zero but PARTIAL is not, take all other actions for an
3236   argument partially in registers, but do not actually load any
3237   registers.
3238
3239   EXTRA is the amount in bytes of extra space to leave next to this arg.
3240   This is ignored if an argument block has already been allocated.
3241
3242   On a machine that lacks real push insns, ARGS_ADDR is the address of
3243   the bottom of the argument block for this call.  We use indexing off there
3244   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3245   argument block has not been preallocated.
3246
3247   ARGS_SO_FAR is the size of args previously pushed for this call.
3248
3249   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3250   for arguments passed in registers.  If nonzero, it will be the number
3251   of bytes required.  */
3252
3253void
3254emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3255		args_addr, args_so_far, reg_parm_stack_space,
3256                alignment_pad)
3257     rtx x;
3258     enum machine_mode mode;
3259     tree type;
3260     rtx size;
3261     unsigned int align;
3262     int partial;
3263     rtx reg;
3264     int extra;
3265     rtx args_addr;
3266     rtx args_so_far;
3267     int reg_parm_stack_space;
3268     rtx alignment_pad;
3269{
3270  rtx xinner;
3271  enum direction stack_direction
3272#ifdef STACK_GROWS_DOWNWARD
3273    = downward;
3274#else
3275    = upward;
3276#endif
3277
3278  /* Decide where to pad the argument: `downward' for below,
3279     `upward' for above, or `none' for don't pad it.
3280     Default is below for small data on big-endian machines; else above.  */
3281  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3282
3283  /* Invert direction if stack is post-decrement.
3284     FIXME: why?  */
3285  if (STACK_PUSH_CODE == POST_DEC)
3286    if (where_pad != none)
3287      where_pad = (where_pad == downward ? upward : downward);
3288
3289  xinner = x = protect_from_queue (x, 0);
3290
3291  if (mode == BLKmode)
3292    {
3293      /* Copy a block into the stack, entirely or partially.  */
3294
3295      rtx temp;
3296      int used = partial * UNITS_PER_WORD;
3297      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3298      int skip;
3299
3300      if (size == 0)
3301	abort ();
3302
3303      used -= offset;
3304
3305      /* USED is now the # of bytes we need not copy to the stack
3306	 because registers will take care of them.  */
3307
3308      if (partial != 0)
3309	xinner = adjust_address (xinner, BLKmode, used);
3310
3311      /* If the partial register-part of the arg counts in its stack size,
3312	 skip the part of stack space corresponding to the registers.
3313	 Otherwise, start copying to the beginning of the stack space,
3314	 by setting SKIP to 0.  */
3315      skip = (reg_parm_stack_space == 0) ? 0 : used;
3316
3317#ifdef PUSH_ROUNDING
3318      /* Do it with several push insns if that doesn't take lots of insns
3319	 and if there is no difficulty with push insns that skip bytes
3320	 on the stack for alignment purposes.  */
3321      if (args_addr == 0
3322	  && PUSH_ARGS
3323	  && GET_CODE (size) == CONST_INT
3324	  && skip == 0
3325	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3326	  /* Here we avoid the case of a structure whose weak alignment
3327	     forces many pushes of a small amount of data,
3328	     and such small pushes do rounding that causes trouble.  */
3329	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3330	      || align >= BIGGEST_ALIGNMENT
3331	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3332		  == (align / BITS_PER_UNIT)))
3333	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3334	{
3335	  /* Push padding now if padding above and stack grows down,
3336	     or if padding below and stack grows up.
3337	     But if space already allocated, this has already been done.  */
3338	  if (extra && args_addr == 0
3339	      && where_pad != none && where_pad != stack_direction)
3340	    anti_adjust_stack (GEN_INT (extra));
3341
3342	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3343	}
3344      else
3345#endif /* PUSH_ROUNDING  */
3346	{
3347	  rtx target;
3348
3349	  /* Otherwise make space on the stack and copy the data
3350	     to the address of that space.  */
3351
3352	  /* Deduct words put into registers from the size we must copy.  */
3353	  if (partial != 0)
3354	    {
3355	      if (GET_CODE (size) == CONST_INT)
3356		size = GEN_INT (INTVAL (size) - used);
3357	      else
3358		size = expand_binop (GET_MODE (size), sub_optab, size,
3359				     GEN_INT (used), NULL_RTX, 0,
3360				     OPTAB_LIB_WIDEN);
3361	    }
3362
3363	  /* Get the address of the stack space.
3364	     In this case, we do not deal with EXTRA separately.
3365	     A single stack adjust will do.  */
3366	  if (! args_addr)
3367	    {
3368	      temp = push_block (size, extra, where_pad == downward);
3369	      extra = 0;
3370	    }
3371	  else if (GET_CODE (args_so_far) == CONST_INT)
3372	    temp = memory_address (BLKmode,
3373				   plus_constant (args_addr,
3374						  skip + INTVAL (args_so_far)));
3375	  else
3376	    temp = memory_address (BLKmode,
3377				   plus_constant (gen_rtx_PLUS (Pmode,
3378								args_addr,
3379								args_so_far),
3380						  skip));
3381	  target = gen_rtx_MEM (BLKmode, temp);
3382
3383	  if (type != 0)
3384	    {
3385	      set_mem_attributes (target, type, 1);
3386	      /* Function incoming arguments may overlap with sibling call
3387		 outgoing arguments and we cannot allow reordering of reads
3388		 from function arguments with stores to outgoing arguments
3389		 of sibling calls.  */
3390	      set_mem_alias_set (target, 0);
3391	    }
3392	  else
3393	    set_mem_align (target, align);
3394
3395	  /* TEMP is the address of the block.  Copy the data there.  */
3396	  if (GET_CODE (size) == CONST_INT
3397	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3398	    {
3399	      move_by_pieces (target, xinner, INTVAL (size), align);
3400	      goto ret;
3401	    }
3402	  else
3403	    {
3404	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3405	      enum machine_mode mode;
3406
3407	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3408		   mode != VOIDmode;
3409		   mode = GET_MODE_WIDER_MODE (mode))
3410		{
3411		  enum insn_code code = movstr_optab[(int) mode];
3412		  insn_operand_predicate_fn pred;
3413
3414		  if (code != CODE_FOR_nothing
3415		      && ((GET_CODE (size) == CONST_INT
3416			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3417			       <= (GET_MODE_MASK (mode) >> 1)))
3418			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3419		      && (!(pred = insn_data[(int) code].operand[0].predicate)
3420			  || ((*pred) (target, BLKmode)))
3421		      && (!(pred = insn_data[(int) code].operand[1].predicate)
3422			  || ((*pred) (xinner, BLKmode)))
3423		      && (!(pred = insn_data[(int) code].operand[3].predicate)
3424			  || ((*pred) (opalign, VOIDmode))))
3425		    {
3426		      rtx op2 = convert_to_mode (mode, size, 1);
3427		      rtx last = get_last_insn ();
3428		      rtx pat;
3429
3430		      pred = insn_data[(int) code].operand[2].predicate;
3431		      if (pred != 0 && ! (*pred) (op2, mode))
3432			op2 = copy_to_mode_reg (mode, op2);
3433
3434		      pat = GEN_FCN ((int) code) (target, xinner,
3435						  op2, opalign);
3436		      if (pat)
3437			{
3438			  emit_insn (pat);
3439			  goto ret;
3440			}
3441		      else
3442			delete_insns_since (last);
3443		    }
3444		}
3445	    }
3446
3447	  if (!ACCUMULATE_OUTGOING_ARGS)
3448	    {
3449	      /* If the source is referenced relative to the stack pointer,
3450		 copy it to another register to stabilize it.  We do not need
3451		 to do this if we know that we won't be changing sp.  */
3452
3453	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3454		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3455		temp = copy_to_reg (temp);
3456	    }
3457
3458	  /* Make inhibit_defer_pop nonzero around the library call
3459	     to force it to pop the memcpy/bcopy arguments right away.  */
3460	  NO_DEFER_POP;
3461#ifdef TARGET_MEM_FUNCTIONS
3462	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
3463			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3464			     convert_to_mode (TYPE_MODE (sizetype),
3465					      size, TREE_UNSIGNED (sizetype)),
3466			     TYPE_MODE (sizetype));
3467#else
3468	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
3469			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3470			     convert_to_mode (TYPE_MODE (integer_type_node),
3471					      size,
3472					      TREE_UNSIGNED (integer_type_node)),
3473			     TYPE_MODE (integer_type_node));
3474#endif
3475	  OK_DEFER_POP;
3476	}
3477    }
3478  else if (partial > 0)
3479    {
3480      /* Scalar partly in registers.  */
3481
3482      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3483      int i;
3484      int not_stack;
3485      /* Number of words at the start of the argument
3486	 that we must make space for but need not store.  */
3487      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3488      int args_offset = INTVAL (args_so_far);
3489      int skip;
3490
3491      /* Push padding now if padding above and stack grows down,
3492	 or if padding below and stack grows up.
3493	 But if space is already allocated, this has already been done.  */
3494      if (extra && args_addr == 0
3495	  && where_pad != none && where_pad != stack_direction)
3496	anti_adjust_stack (GEN_INT (extra));
3497
3498      /* If we make space by pushing it, we might as well push
3499	 the real data.  Otherwise, we can leave OFFSET nonzero
3500	 and leave the space uninitialized.  */
3501      if (args_addr == 0)
3502	offset = 0;
3503
3504      /* Now NOT_STACK gets the number of words that we don't need to
3505	 allocate on the stack.  */
3506      not_stack = partial - offset;
3507
3508      /* If the partial register-part of the arg counts in its stack size,
3509	 skip the part of stack space corresponding to the registers.
3510	 Otherwise, start copying to the beginning of the stack space,
3511	 by setting SKIP to 0.  */
3512      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3513
3514      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3515	x = validize_mem (force_const_mem (mode, x));
3516
3517      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3518	 SUBREGs of such registers are not allowed.  */
3519      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3520	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3521	x = copy_to_reg (x);
3522
3523      /* Loop over all the words allocated on the stack for this arg.  */
3524      /* We can do it by words, because any scalar bigger than a word
3525	 has a size that is a multiple of a word.  */
3526#ifndef PUSH_ARGS_REVERSED
3527      for (i = not_stack; i < size; i++)
3528#else
3529      for (i = size - 1; i >= not_stack; i--)
3530#endif
3531	if (i >= not_stack + offset)
3532	  emit_push_insn (operand_subword_force (x, i, mode),
3533			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3534			  0, args_addr,
3535			  GEN_INT (args_offset + ((i - not_stack + skip)
3536						  * UNITS_PER_WORD)),
3537			  reg_parm_stack_space, alignment_pad);
3538    }
3539  else
3540    {
3541      rtx addr;
3542      rtx target = NULL_RTX;
3543      rtx dest;
3544
3545      /* Push padding now if padding above and stack grows down,
3546	 or if padding below and stack grows up.
3547	 But if space is already allocated, this has already been done.  */
3548      if (extra && args_addr == 0
3549	  && where_pad != none && where_pad != stack_direction)
3550	anti_adjust_stack (GEN_INT (extra));
3551
3552#ifdef PUSH_ROUNDING
3553      if (args_addr == 0 && PUSH_ARGS)
3554	emit_single_push_insn (mode, x, type);
3555      else
3556#endif
3557	{
3558	  if (GET_CODE (args_so_far) == CONST_INT)
3559	    addr
3560	      = memory_address (mode,
3561				plus_constant (args_addr,
3562					       INTVAL (args_so_far)));
3563	  else
3564	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3565						       args_so_far));
3566	  target = addr;
3567	  dest = gen_rtx_MEM (mode, addr);
3568	  if (type != 0)
3569	    {
3570	      set_mem_attributes (dest, type, 1);
3571	      /* Function incoming arguments may overlap with sibling call
3572		 outgoing arguments and we cannot allow reordering of reads
3573		 from function arguments with stores to outgoing arguments
3574		 of sibling calls.  */
3575	      set_mem_alias_set (dest, 0);
3576	    }
3577
3578	  emit_move_insn (dest, x);
3579	}
3580
3581    }
3582
3583 ret:
3584  /* If part should go in registers, copy that part
3585     into the appropriate registers.  Do this now, at the end,
3586     since mem-to-mem copies above may do function calls.  */
3587  if (partial > 0 && reg != 0)
3588    {
3589      /* Handle calls that pass values in multiple non-contiguous locations.
3590	 The Irix 6 ABI has examples of this.  */
3591      if (GET_CODE (reg) == PARALLEL)
3592	emit_group_load (reg, x, -1);  /* ??? size? */
3593      else
3594	move_block_to_reg (REGNO (reg), x, partial, mode);
3595    }
3596
3597  if (extra && args_addr == 0 && where_pad == stack_direction)
3598    anti_adjust_stack (GEN_INT (extra));
3599
3600  if (alignment_pad && args_addr == 0)
3601    anti_adjust_stack (alignment_pad);
3602}
3603
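/* Illustrative sketch (not part of the compiler): the partial-register
   case above stores each stack-resident word of the argument at byte
   offset args_offset + (i - not_stack + skip) * UNITS_PER_WORD.  The
   helper below models just that address arithmetic with a hypothetical
   8-byte word size; the first word that actually needs a stack slot
   (i == not_stack, skip == 0) lands at args_offset itself.  */
#if 0
static long
arg_word_offset_sketch (long args_offset, int i, int not_stack, int skip)
{
  const int units_per_word = 8;	/* hypothetical UNITS_PER_WORD */
  return args_offset + (long) (i - not_stack + skip) * units_per_word;
}
#endif
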
3604/* Return X if X can be used as a subtarget in a sequence of arithmetic
3605   operations.  */
3606
3607static rtx
3608get_subtarget (x)
3609     rtx x;
3610{
3611  return ((x == 0
3612	   /* Only registers can be subtargets.  */
3613	   || GET_CODE (x) != REG
3614	   /* If the register is readonly, it can't be set more than once.  */
3615	   || RTX_UNCHANGING_P (x)
3616	   /* Don't use hard regs to avoid extending their life.  */
3617	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3618	   /* Avoid subtargets inside loops,
3619	      since they hide some invariant expressions.  */
3620	   || preserve_subexpressions_p ())
3621	  ? 0 : x);
3622}
3623
3624/* Expand an assignment that stores the value of FROM into TO.
3625   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3626   (This may contain a QUEUED rtx;
3627   if the value is constant, this rtx is a constant.)
3628   Otherwise, the returned value is NULL_RTX.
3629
3630   SUGGEST_REG is no longer actually used.
3631   It used to mean, copy the value through a register
3632   and return that register, if that is possible.
3633   We now use WANT_VALUE to decide whether to do this.  */
3634
3635rtx
3636expand_assignment (to, from, want_value, suggest_reg)
3637     tree to, from;
3638     int want_value;
3639     int suggest_reg ATTRIBUTE_UNUSED;
3640{
3641  rtx to_rtx = 0;
3642  rtx result;
3643
3644  /* Don't crash if the lhs of the assignment was erroneous.  */
3645
3646  if (TREE_CODE (to) == ERROR_MARK)
3647    {
3648      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3649      return want_value ? result : NULL_RTX;
3650    }
3651
3652  /* Assignment of a structure component needs special treatment
3653     if the structure component's rtx is not simply a MEM.
3654     Assignment of an array element at a constant index, and assignment of
3655     an array element in an unaligned packed structure field, has the same
3656     problem.  */
3657
3658  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3659      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3660    {
3661      enum machine_mode mode1;
3662      HOST_WIDE_INT bitsize, bitpos;
3663      rtx orig_to_rtx;
3664      tree offset;
3665      int unsignedp;
3666      int volatilep = 0;
3667      tree tem;
3668
3669      push_temp_slots ();
3670      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3671				 &unsignedp, &volatilep);
3672
3673      /* If we are going to use store_bit_field and extract_bit_field,
3674	 make sure to_rtx will be safe for multiple use.  */
3675
3676      if (mode1 == VOIDmode && want_value)
3677	tem = stabilize_reference (tem);
3678
3679      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3680
3681      if (offset != 0)
3682	{
3683	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3684
3685	  if (GET_CODE (to_rtx) != MEM)
3686	    abort ();
3687
3688#ifdef POINTERS_EXTEND_UNSIGNED
3689	  if (GET_MODE (offset_rtx) != Pmode)
3690	    offset_rtx = convert_memory_address (Pmode, offset_rtx);
3691#else
3692	  if (GET_MODE (offset_rtx) != ptr_mode)
3693	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3694#endif
3695
3696	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3697	     to call force_reg in that case, so avoid it.  */
3698	  if (GET_CODE (to_rtx) == MEM
3699	      && GET_MODE (to_rtx) == BLKmode
3700	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3701	      && bitsize > 0
3702	      && (bitpos % bitsize) == 0
3703	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3704	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3705	    {
3706	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3707	      bitpos = 0;
3708	    }
3709
3710	  to_rtx = offset_address (to_rtx, offset_rtx,
3711				   highest_pow2_factor_for_type (TREE_TYPE (to),
3712								 offset));
3713	}
3714
3715      if (GET_CODE (to_rtx) == MEM)
3716	{
3717	  tree old_expr = MEM_EXPR (to_rtx);
3718
3719	  /* If the field is at offset zero, we could have been given the
3720	     DECL_RTX of the parent struct.  Don't munge it.  */
3721	  to_rtx = shallow_copy_rtx (to_rtx);
3722
3723	  set_mem_attributes (to_rtx, to, 0);
3724
3725	  /* If we changed MEM_EXPR, that means we're now referencing
3726	     the COMPONENT_REF, which means that MEM_OFFSET must be
3727	     relative to that field.  But we've not yet reflected BITPOS
3728	     in TO_RTX.  This will be done in store_field.  Adjust for
3729	     that by biasing MEM_OFFSET by -bitpos.  */
3730	  if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3731	      && (bitpos / BITS_PER_UNIT) != 0)
3732	    set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3733					     - (bitpos / BITS_PER_UNIT)));
3734	}
3735
3736      /* Deal with volatile and readonly fields.  The former is only done
3737	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
3738      if (volatilep && GET_CODE (to_rtx) == MEM)
3739	{
3740	  if (to_rtx == orig_to_rtx)
3741	    to_rtx = copy_rtx (to_rtx);
3742	  MEM_VOLATILE_P (to_rtx) = 1;
3743	}
3744
3745      if (TREE_CODE (to) == COMPONENT_REF
3746	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3747	{
3748	  if (to_rtx == orig_to_rtx)
3749	    to_rtx = copy_rtx (to_rtx);
3750	  RTX_UNCHANGING_P (to_rtx) = 1;
3751	}
3752
3753      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3754	{
3755	  if (to_rtx == orig_to_rtx)
3756	    to_rtx = copy_rtx (to_rtx);
3757	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3758	}
3759
3760      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3761			    (want_value
3762			     /* Spurious cast for HPUX compiler.  */
3763			     ? ((enum machine_mode)
3764				TYPE_MODE (TREE_TYPE (to)))
3765			     : VOIDmode),
3766			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
3767
3768      preserve_temp_slots (result);
3769      free_temp_slots ();
3770      pop_temp_slots ();
3771
3772      /* If the value is meaningful, convert RESULT to the proper mode.
3773	 Otherwise, return nothing.  */
3774      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3775					  TYPE_MODE (TREE_TYPE (from)),
3776					  result,
3777					  TREE_UNSIGNED (TREE_TYPE (to)))
3778	      : NULL_RTX);
3779    }
3780
3781  /* If the rhs is a function call and its value is not an aggregate,
3782     call the function before we start to compute the lhs.
3783     This is needed for correct code for cases such as
3784     val = setjmp (buf) on machines where reference to val
3785     requires loading up part of an address in a separate insn.
3786
3787     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3788     since it might be a promoted variable where the zero- or sign- extension
3789     needs to be done.  Handling this in the normal way is safe because no
3790     computation is done before the call.  */
3791  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3792      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3793      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3794	    && GET_CODE (DECL_RTL (to)) == REG))
3795    {
3796      rtx value;
3797
3798      push_temp_slots ();
3799      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3800      if (to_rtx == 0)
3801	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3802
3803      /* Handle calls that return values in multiple non-contiguous locations.
3804	 The Irix 6 ABI has examples of this.  */
3805      if (GET_CODE (to_rtx) == PARALLEL)
3806	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3807      else if (GET_MODE (to_rtx) == BLKmode)
3808	emit_block_move (to_rtx, value, expr_size (from));
3809      else
3810	{
3811#ifdef POINTERS_EXTEND_UNSIGNED
3812	  if (POINTER_TYPE_P (TREE_TYPE (to))
3813	      && GET_MODE (to_rtx) != GET_MODE (value))
3814	    value = convert_memory_address (GET_MODE (to_rtx), value);
3815#endif
3816	  emit_move_insn (to_rtx, value);
3817	}
3818      preserve_temp_slots (to_rtx);
3819      free_temp_slots ();
3820      pop_temp_slots ();
3821      return want_value ? to_rtx : NULL_RTX;
3822    }
3823
3824  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3825     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3826
3827  if (to_rtx == 0)
3828    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3829
3830  /* Don't move directly into a return register.  */
3831  if (TREE_CODE (to) == RESULT_DECL
3832      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3833    {
3834      rtx temp;
3835
3836      push_temp_slots ();
3837      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3838
3839      if (GET_CODE (to_rtx) == PARALLEL)
3840	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3841      else
3842	emit_move_insn (to_rtx, temp);
3843
3844      preserve_temp_slots (to_rtx);
3845      free_temp_slots ();
3846      pop_temp_slots ();
3847      return want_value ? to_rtx : NULL_RTX;
3848    }
3849
3850  /* In case we are returning the contents of an object which overlaps
3851     the place the value is being stored, use a safe function when copying
3852     a value through a pointer into a structure value return block.  */
3853  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3854      && current_function_returns_struct
3855      && !current_function_returns_pcc_struct)
3856    {
3857      rtx from_rtx, size;
3858
3859      push_temp_slots ();
3860      size = expr_size (from);
3861      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3862
3863#ifdef TARGET_MEM_FUNCTIONS
3864      emit_library_call (memmove_libfunc, LCT_NORMAL,
3865			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3866			 XEXP (from_rtx, 0), Pmode,
3867			 convert_to_mode (TYPE_MODE (sizetype),
3868					  size, TREE_UNSIGNED (sizetype)),
3869			 TYPE_MODE (sizetype));
3870#else
3871      emit_library_call (bcopy_libfunc, LCT_NORMAL,
3872			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3873			 XEXP (to_rtx, 0), Pmode,
3874			 convert_to_mode (TYPE_MODE (integer_type_node),
3875					  size, TREE_UNSIGNED (integer_type_node)),
3876			 TYPE_MODE (integer_type_node));
3877#endif
3878
3879      preserve_temp_slots (to_rtx);
3880      free_temp_slots ();
3881      pop_temp_slots ();
3882      return want_value ? to_rtx : NULL_RTX;
3883    }
3884
3885  /* Compute FROM and store the value in the rtx we got.  */
3886
3887  push_temp_slots ();
3888  result = store_expr (from, to_rtx, want_value);
3889  preserve_temp_slots (result);
3890  free_temp_slots ();
3891  pop_temp_slots ();
3892  return want_value ? result : NULL_RTX;
3893}
3894
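/* Hedged usage sketch (not part of this file; the helper name is
   hypothetical): a front end, or the MODIFY_EXPR handling in
   expand_expr, would typically call expand_assignment like this,
   requesting the value only when the assignment is itself used as a
   subexpression.  */
#if 0
static rtx
expand_modify_expr_sketch (tree exp, int used_as_value)
{
  /* Operand 0 is the lhs, operand 1 the rhs of the MODIFY_EXPR.  */
  return expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			    used_as_value, 0);
}
#endif
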
3895/* Generate code for computing expression EXP,
3896   and storing the value into TARGET.
3897   TARGET may contain a QUEUED rtx.
3898
3899   If WANT_VALUE is nonzero, return a copy of the value
3900   not in TARGET, so that we can be sure to use the proper
3901   value in a containing expression even if TARGET has something
3902   else stored in it.  If possible, we copy the value through a pseudo
3903   and return that pseudo.  Or, if the value is constant, we try to
3904   return the constant.  In some cases, we return a pseudo
3905   copied *from* TARGET.
3906
3907   If the mode is BLKmode then we may return TARGET itself.
3908   It turns out that in BLKmode it doesn't cause a problem,
3909   because C has no operators that could combine two different
3910   assignments into the same BLKmode object with different values
3911   with no sequence point.  Will other languages need this to
3912   be more thorough?
3913
3914   If WANT_VALUE is 0, we return NULL, to make sure
3915   to catch quickly any cases where the caller uses the value
3916   and fails to set WANT_VALUE.  */
3917
3918rtx
3919store_expr (exp, target, want_value)
3920     tree exp;
3921     rtx target;
3922     int want_value;
3923{
3924  rtx temp;
3925  int dont_return_target = 0;
3926  int dont_store_target = 0;
3927
3928  if (TREE_CODE (exp) == COMPOUND_EXPR)
3929    {
3930      /* Perform first part of compound expression, then assign from second
3931	 part.  */
3932      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3933      emit_queue ();
3934      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3935    }
3936  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3937    {
3938      /* For conditional expression, get safe form of the target.  Then
3939	 test the condition, doing the appropriate assignment on either
3940	 side.  This avoids the creation of unnecessary temporaries.
3941	 For non-BLKmode, it is more efficient not to do this.  */
3942
3943      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3944
3945      emit_queue ();
3946      target = protect_from_queue (target, 1);
3947
3948      do_pending_stack_adjust ();
3949      NO_DEFER_POP;
3950      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3951      start_cleanup_deferral ();
3952      store_expr (TREE_OPERAND (exp, 1), target, 0);
3953      end_cleanup_deferral ();
3954      emit_queue ();
3955      emit_jump_insn (gen_jump (lab2));
3956      emit_barrier ();
3957      emit_label (lab1);
3958      start_cleanup_deferral ();
3959      store_expr (TREE_OPERAND (exp, 2), target, 0);
3960      end_cleanup_deferral ();
3961      emit_queue ();
3962      emit_label (lab2);
3963      OK_DEFER_POP;
3964
3965      return want_value ? target : NULL_RTX;
3966    }
3967  else if (queued_subexp_p (target))
3968    /* If target contains a postincrement, let's not risk
3969       using it as the place to generate the rhs.  */
3970    {
3971      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3972	{
3973	  /* Expand EXP into a new pseudo.  */
3974	  temp = gen_reg_rtx (GET_MODE (target));
3975	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3976	}
3977      else
3978	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3979
3980      /* If target is volatile, ANSI requires accessing the value
3981	 *from* the target, if it is accessed.  So make that happen.
3982	 In no case return the target itself.  */
3983      if (! MEM_VOLATILE_P (target) && want_value)
3984	dont_return_target = 1;
3985    }
3986  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3987	   && GET_MODE (target) != BLKmode)
3988    /* If target is in memory and caller wants value in a register instead,
3989       arrange that.  Pass TARGET as target for expand_expr so that,
3990       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3991       We know expand_expr will not use the target in that case.
3992       Don't do this if TARGET is volatile because we are supposed
3993       to write it and then read it.  */
3994    {
3995      temp = expand_expr (exp, target, GET_MODE (target), 0);
3996      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3997	{
3998	  /* If TEMP is already in the desired TARGET, only copy it from
3999	     memory and don't store it there again.  */
4000	  if (temp == target
4001	      || (rtx_equal_p (temp, target)
4002		  && ! side_effects_p (temp) && ! side_effects_p (target)))
4003	    dont_store_target = 1;
4004	  temp = copy_to_reg (temp);
4005	}
4006      dont_return_target = 1;
4007    }
4008  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4009    /* If this is a scalar in a register that is stored in a wider mode
4010       than the declared mode, compute the result into its declared mode
4011       and then convert to the wider mode.  Our value is the computed
4012       expression.  */
4013    {
4014      rtx inner_target = 0;
4015
4016      /* If we don't want a value, we can do the conversion inside EXP,
4017	 which will often result in some optimizations.  Do the conversion
4018	 in two steps: first change the signedness, if needed, then
4019	 the extend.  But don't do this if the type of EXP is a subtype
4020	 of something else since then the conversion might involve
4021	 more than just converting modes.  */
4022      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4023	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4024	{
4025	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4026	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4027	    exp
4028	      = convert
4029		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4030					  TREE_TYPE (exp)),
4031		 exp);
4032
4033	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4034					SUBREG_PROMOTED_UNSIGNED_P (target)),
4035			 exp);
4036
4037	  inner_target = SUBREG_REG (target);
4038	}
4039
4040      temp = expand_expr (exp, inner_target, VOIDmode, 0);
4041
4042      /* If TEMP is a volatile MEM and we want a result value, make
4043	 the access now so it gets done only once.  Likewise if
4044	 it contains TARGET.  */
4045      if (GET_CODE (temp) == MEM && want_value
4046	  && (MEM_VOLATILE_P (temp)
4047	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4048	temp = copy_to_reg (temp);
4049
4050      /* If TEMP is a VOIDmode constant, use convert_modes to make
4051	 sure that we properly convert it.  */
4052      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4053	{
4054	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4055				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4056	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4057			        GET_MODE (target), temp,
4058			        SUBREG_PROMOTED_UNSIGNED_P (target));
4059	}
4060
4061      convert_move (SUBREG_REG (target), temp,
4062		    SUBREG_PROMOTED_UNSIGNED_P (target));
4063
4064      /* If we promoted a constant, change the mode back down to match
4065	 target.  Otherwise, the caller might get confused by a result whose
4066	 mode is larger than expected.  */
4067
4068      if (want_value && GET_MODE (temp) != GET_MODE (target))
4069	{
4070	  if (GET_MODE (temp) != VOIDmode)
4071	    {
4072	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4073	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4074	      SUBREG_PROMOTED_UNSIGNED_P (temp)
4075		= SUBREG_PROMOTED_UNSIGNED_P (target);
4076	    }
4077	  else
4078	    temp = convert_modes (GET_MODE (target),
4079				  GET_MODE (SUBREG_REG (target)),
4080				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4081	}
4082
4083      return want_value ? temp : NULL_RTX;
4084    }
4085  else
4086    {
4087      temp = expand_expr (exp, target, GET_MODE (target), 0);
4088      /* Return TARGET if it's a specified hardware register.
4089	 If TARGET is a volatile mem ref, either return TARGET
4090	 or return a reg copied *from* TARGET; ANSI requires this.
4091
4092	 Otherwise, if TEMP is not TARGET, return TEMP
4093	 if it is constant (for efficiency),
4094	 or if we really want the correct value.  */
4095      if (!(target && GET_CODE (target) == REG
4096	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4097	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4098	  && ! rtx_equal_p (temp, target)
4099	  && (CONSTANT_P (temp) || want_value))
4100	dont_return_target = 1;
4101    }
4102
4103  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4104     the same as that of TARGET, adjust the constant.  This is needed, for
4105     example, in case it is a CONST_DOUBLE and we want only a word-sized
4106     value.  */
4107  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4108      && TREE_CODE (exp) != ERROR_MARK
4109      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4110    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4111			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4112
4113  /* If value was not generated in the target, store it there.
4114     Convert the value to TARGET's type first if necessary.
4115     If TEMP and TARGET compare equal according to rtx_equal_p, but
4116     one or both of them are volatile memory refs, we have to distinguish
4117     two cases:
4118     - expand_expr has used TARGET.  In this case, we must not generate
4119       another copy.  This can be detected by TARGET being equal according
4120       to == .
4121     - expand_expr has not used TARGET - that means that the source just
4122       happens to have the same RTX form.  Since temp will have been created
4123       by expand_expr, it will compare unequal according to == .
4124       We must generate a copy in this case, to reach the correct number
4125       of volatile memory references.  */
4126
4127  if ((! rtx_equal_p (temp, target)
4128       || (temp != target && (side_effects_p (temp)
4129			      || side_effects_p (target))))
4130      && TREE_CODE (exp) != ERROR_MARK
4131      && ! dont_store_target
4132	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4133	    but TARGET is not a valid memory reference, TEMP will differ
4134	    from TARGET although it is really the same location.  */
4135      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4136	  || target != DECL_RTL_IF_SET (exp)))
4137    {
4138      target = protect_from_queue (target, 1);
4139      if (GET_MODE (temp) != GET_MODE (target)
4140	  && GET_MODE (temp) != VOIDmode)
4141	{
4142	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4143	  if (dont_return_target)
4144	    {
4145	      /* In this case, we will return TEMP,
4146		 so make sure it has the proper mode.
4147		 But don't forget to store the value into TARGET.  */
4148	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4149	      emit_move_insn (target, temp);
4150	    }
4151	  else
4152	    convert_move (target, temp, unsignedp);
4153	}
4154
4155      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4156	{
4157	  /* Handle copying a string constant into an array.  The string
4158	     constant may be shorter than the array.  So copy just the string's
4159	     actual length, and clear the rest.  First get the size of the data
4160	     type of the string, which is actually the size of the target.  */
4161	  rtx size = expr_size (exp);
4162
4163	  if (GET_CODE (size) == CONST_INT
4164	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4165	    emit_block_move (target, temp, size);
4166	  else
4167	    {
4168	      /* Compute the size of the data to copy from the string.  */
4169	      tree copy_size
4170		= size_binop (MIN_EXPR,
4171			      make_tree (sizetype, size),
4172			      size_int (TREE_STRING_LENGTH (exp)));
4173	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4174					       VOIDmode, 0);
4175	      rtx label = 0;
4176
4177	      /* Copy that much.  */
4178	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4179	      emit_block_move (target, temp, copy_size_rtx);
4180
4181	      /* Figure out how much is left in TARGET that we have to clear.
4182		 Do all calculations in ptr_mode.  */
4183	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4184		{
4185		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4186		  target = adjust_address (target, BLKmode,
4187					   INTVAL (copy_size_rtx));
4188		}
4189	      else
4190		{
4191		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4192				       copy_size_rtx, NULL_RTX, 0,
4193				       OPTAB_LIB_WIDEN);
4194
4195#ifdef POINTERS_EXTEND_UNSIGNED
4196		  if (GET_MODE (copy_size_rtx) != Pmode)
4197		    copy_size_rtx = convert_memory_address (Pmode,
4198							    copy_size_rtx);
4199#endif
4200
4201		  target = offset_address (target, copy_size_rtx,
4202					   highest_pow2_factor (copy_size));
4203		  label = gen_label_rtx ();
4204		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4205					   GET_MODE (size), 0, label);
4206		}
4207
4208	      if (size != const0_rtx)
4209		clear_storage (target, size);
4210
4211	      if (label)
4212		emit_label (label);
4213	    }
4214	}
4215      /* Handle calls that return values in multiple non-contiguous locations.
4216	 The Irix 6 ABI has examples of this.  */
4217      else if (GET_CODE (target) == PARALLEL)
4218	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4219      else if (GET_MODE (temp) == BLKmode)
4220	emit_block_move (target, temp, expr_size (exp));
4221      else
4222	emit_move_insn (target, temp);
4223    }
4224
4225  /* If we don't want a value, return NULL_RTX.  */
4226  if (! want_value)
4227    return NULL_RTX;
4228
4229  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4230     ??? The latter test doesn't seem to make sense.  */
4231  else if (dont_return_target && GET_CODE (temp) != MEM)
4232    return temp;
4233
4234  /* Return TARGET itself if it is a hard register.  */
4235  else if (want_value && GET_MODE (target) != BLKmode
4236	   && ! (GET_CODE (target) == REG
4237		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4238    return copy_to_reg (target);
4239
4240  else
4241    return target;
4242}
4243
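/* Stand-alone sketch (assumption-laden, not GCC code): the STRING_CST
   branch of store_expr above behaves like copying min (target size,
   string length) bytes and then clearing whatever is left of the
   target.  In plain C the same effect is:  */
#if 0
#include <string.h>

static void
copy_string_into_array_sketch (char *target, size_t target_size,
			       const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);			/* emit_block_move above */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);	/* clear_storage */
}
#endif
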
4244/* Return 1 if EXP just contains zeros.  */
4245
4246static int
4247is_zeros_p (exp)
4248     tree exp;
4249{
4250  tree elt;
4251
4252  switch (TREE_CODE (exp))
4253    {
4254    case CONVERT_EXPR:
4255    case NOP_EXPR:
4256    case NON_LVALUE_EXPR:
4257    case VIEW_CONVERT_EXPR:
4258      return is_zeros_p (TREE_OPERAND (exp, 0));
4259
4260    case INTEGER_CST:
4261      return integer_zerop (exp);
4262
4263    case COMPLEX_CST:
4264      return
4265	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4266
4267    case REAL_CST:
4268      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4269
4270    case VECTOR_CST:
4271      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4272	   elt = TREE_CHAIN (elt))
4273	if (!is_zeros_p (TREE_VALUE (elt)))
4274	  return 0;
4275
4276      return 1;
4277
4278    case CONSTRUCTOR:
4279      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4280	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4281      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4282	if (! is_zeros_p (TREE_VALUE (elt)))
4283	  return 0;
4284
4285      return 1;
4286
4287    default:
4288      return 0;
4289    }
4290}
4291
4292/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4293
4294static int
4295mostly_zeros_p (exp)
4296     tree exp;
4297{
4298  if (TREE_CODE (exp) == CONSTRUCTOR)
4299    {
4300      int elts = 0, zeros = 0;
4301      tree elt = CONSTRUCTOR_ELTS (exp);
4302      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4303	{
4304	  /* If there are no ranges of true bits, it is all zero.  */
4305	  return elt == NULL_TREE;
4306	}
4307      for (; elt; elt = TREE_CHAIN (elt))
4308	{
4309	  /* We do not handle the case where the index is a RANGE_EXPR,
4310	     so the statistic will be somewhat inaccurate.
4311	     We do make a more accurate count in store_constructor itself,
4312	     so since this function is only used for nested array elements,
4313	     this should be close enough.  */
4314	  if (mostly_zeros_p (TREE_VALUE (elt)))
4315	    zeros++;
4316	  elts++;
4317	}
4318
4319      return 4 * zeros >= 3 * elts;
4320    }
4321
4322  return is_zeros_p (exp);
4323}
4324
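/* Minimal numeric sketch (not part of the compiler): the test
   "4 * zeros >= 3 * elts" above is an integer-only way of asking
   whether at least 75% of the elements are zero.  For example:  */
#if 0
static int
mostly_zero_ints_sketch (const int *v, int n)
{
  int i, zeros = 0;

  for (i = 0; i < n; i++)
    if (v[i] == 0)
      zeros++;

  /* 3 zeros out of 4 elements passes; 2 out of 4 does not.  */
  return 4 * zeros >= 3 * n;
}
#endif
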
4325/* Helper function for store_constructor.
4326   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4327   TYPE is the type of the CONSTRUCTOR, not the element type.
4328   CLEARED is as for store_constructor.
4329   ALIAS_SET is the alias set to use for any stores.
4330
4331   This provides a recursive shortcut back to store_constructor when it isn't
4332   necessary to go through store_field.  This is so that we can pass through
4333   the cleared field to let store_constructor know that we may not have to
4334   clear a substructure if the outer structure has already been cleared.  */
4335
4336static void
4337store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4338			 alias_set)
4339     rtx target;
4340     unsigned HOST_WIDE_INT bitsize;
4341     HOST_WIDE_INT bitpos;
4342     enum machine_mode mode;
4343     tree exp, type;
4344     int cleared;
4345     int alias_set;
4346{
4347  if (TREE_CODE (exp) == CONSTRUCTOR
4348      && bitpos % BITS_PER_UNIT == 0
4349      /* If we have a non-zero bitpos for a register target, then we just
4350	 let store_field do the bitfield handling.  This is unlikely to
4351	 generate unnecessary clear instructions anyway.  */
4352      && (bitpos == 0 || GET_CODE (target) == MEM))
4353    {
4354      if (GET_CODE (target) == MEM)
4355	target
4356	  = adjust_address (target,
4357			    GET_MODE (target) == BLKmode
4358			    || 0 != (bitpos
4359				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4360			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4361
4362
4363      /* Update the alias set, if required.  */
4364      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4365	  && MEM_ALIAS_SET (target) != 0)
4366	{
4367	  target = copy_rtx (target);
4368	  set_mem_alias_set (target, alias_set);
4369	}
4370
4371      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4372    }
4373  else
4374    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4375		 alias_set);
4376}
4377
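/* Hedged sketch (helper name is hypothetical): the adjust_address call
   above keeps the target's own mode (VOIDmode means "leave the mode
   alone") only when the sub-object starts at a position aligned for
   that mode; otherwise it falls back to BLKmode.  The alignment test
   reduces to:  */
#if 0
static int
needs_blkmode_sketch (long bitpos, unsigned int mode_align_bits)
{
  /* Nonzero remainder: the piece is not aligned for the target's mode
     and must be accessed as a BLKmode reference.  */
  return bitpos % mode_align_bits != 0;
}
#endif
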
4378/* Store the value of constructor EXP into the rtx TARGET.
4379   TARGET is either a REG or a MEM; we know it cannot conflict, since
4380   safe_from_p has been called.
4381   CLEARED is true if TARGET is known to have been zeroed.
4382   SIZE is the number of bytes of TARGET we are allowed to modify: this
4383   may not be the same as the size of EXP if we are assigning to a field
4384   which has been packed to exclude padding bits.  */
4385
4386static void
4387store_constructor (exp, target, cleared, size)
4388     tree exp;
4389     rtx target;
4390     int cleared;
4391     HOST_WIDE_INT size;
4392{
4393  tree type = TREE_TYPE (exp);
4394#ifdef WORD_REGISTER_OPERATIONS
4395  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4396#endif
4397
4398  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4399      || TREE_CODE (type) == QUAL_UNION_TYPE)
4400    {
4401      tree elt;
4402
4403      /* We either clear the aggregate or indicate the value is dead.  */
4404      if ((TREE_CODE (type) == UNION_TYPE
4405	   || TREE_CODE (type) == QUAL_UNION_TYPE)
4406	  && ! cleared
4407	  && ! CONSTRUCTOR_ELTS (exp))
4408	/* If the constructor is empty, clear the union.  */
4409	{
4410	  clear_storage (target, expr_size (exp));
4411	  cleared = 1;
4412	}
4413
4414      /* If we are building a static constructor into a register,
4415	 set the initial value as zero so we can fold the value into
4416	 a constant.  But if more than one register is involved,
4417	 this probably loses.  */
4418      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4419	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4420	{
4421	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4422	  cleared = 1;
4423	}
4424
4425      /* If the constructor has fewer fields than the structure
4426	 or if we are initializing the structure to mostly zeros,
4427	 clear the whole structure first.  Don't do this if TARGET is a
4428	 register whose mode size isn't equal to SIZE since clear_storage
4429	 can't handle this case.  */
4430      else if (! cleared && size > 0
4431	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4432		    != fields_length (type))
4433		   || mostly_zeros_p (exp))
4434	       && (GET_CODE (target) != REG
4435		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4436		       == size)))
4437	{
4438	  clear_storage (target, GEN_INT (size));
4439	  cleared = 1;
4440	}
4441
4442      if (! cleared)
4443	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4444
4445      /* Store each element of the constructor into
4446	 the corresponding field of TARGET.  */
4447
4448      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4449	{
4450	  tree field = TREE_PURPOSE (elt);
4451	  tree value = TREE_VALUE (elt);
4452	  enum machine_mode mode;
4453	  HOST_WIDE_INT bitsize;
4454	  HOST_WIDE_INT bitpos = 0;
4455	  int unsignedp;
4456	  tree offset;
4457	  rtx to_rtx = target;
4458
4459	  /* Just ignore missing fields.
4460	     We cleared the whole structure, above,
4461	     if any fields are missing.  */
4462	  if (field == 0)
4463	    continue;
4464
4465	  if (cleared && is_zeros_p (value))
4466	    continue;
4467
4468	  if (host_integerp (DECL_SIZE (field), 1))
4469	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4470	  else
4471	    bitsize = -1;
4472
4473	  unsignedp = TREE_UNSIGNED (field);
4474	  mode = DECL_MODE (field);
4475	  if (DECL_BIT_FIELD (field))
4476	    mode = VOIDmode;
4477
4478	  offset = DECL_FIELD_OFFSET (field);
4479	  if (host_integerp (offset, 0)
4480	      && host_integerp (bit_position (field), 0))
4481	    {
4482	      bitpos = int_bit_position (field);
4483	      offset = 0;
4484	    }
4485	  else
4486	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4487
4488	  if (offset)
4489	    {
4490	      rtx offset_rtx;
4491
4492	      if (contains_placeholder_p (offset))
4493		offset = build (WITH_RECORD_EXPR, sizetype,
4494				offset, make_tree (TREE_TYPE (exp), target));
4495
4496	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4497	      if (GET_CODE (to_rtx) != MEM)
4498		abort ();
4499
4500#ifdef POINTERS_EXTEND_UNSIGNED
4501	      if (GET_MODE (offset_rtx) != Pmode)
4502		offset_rtx = convert_memory_address (Pmode, offset_rtx);
4503#else
4504	      if (GET_MODE (offset_rtx) != ptr_mode)
4505		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4506#endif
4507
4508	      to_rtx = offset_address (to_rtx, offset_rtx,
4509				       highest_pow2_factor (offset));
4510	    }
4511
4512	  if (TREE_READONLY (field))
4513	    {
4514	      if (GET_CODE (to_rtx) == MEM)
4515		to_rtx = copy_rtx (to_rtx);
4516
4517	      RTX_UNCHANGING_P (to_rtx) = 1;
4518	    }
4519
4520#ifdef WORD_REGISTER_OPERATIONS
4521	  /* If this initializes a field that is smaller than a word, at the
4522	     start of a word, try to widen it to a full word.
4523	     This special case allows us to output C++ member function
4524	     initializations in a form that the optimizers can understand.  */
4525	  if (GET_CODE (target) == REG
4526	      && bitsize < BITS_PER_WORD
4527	      && bitpos % BITS_PER_WORD == 0
4528	      && GET_MODE_CLASS (mode) == MODE_INT
4529	      && TREE_CODE (value) == INTEGER_CST
4530	      && exp_size >= 0
4531	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4532	    {
4533	      tree type = TREE_TYPE (value);
4534
4535	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4536		{
4537		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4538		  value = convert (type, value);
4539		}
4540
4541	      if (BYTES_BIG_ENDIAN)
4542		value
4543		  = fold (build (LSHIFT_EXPR, type, value,
4544				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4545	      bitsize = BITS_PER_WORD;
4546	      mode = word_mode;
4547	    }
4548#endif
4549
4550	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4551	      && DECL_NONADDRESSABLE_P (field))
4552	    {
4553	      to_rtx = copy_rtx (to_rtx);
4554	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4555	    }
4556
4557	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4558				   value, type, cleared,
4559				   get_alias_set (TREE_TYPE (field)));
4560	}
4561    }
4562  else if (TREE_CODE (type) == ARRAY_TYPE
4563	   || TREE_CODE (type) == VECTOR_TYPE)
4564    {
4565      tree elt;
4566      int i;
4567      int need_to_clear;
4568      tree domain = TYPE_DOMAIN (type);
4569      tree elttype = TREE_TYPE (type);
4570      int const_bounds_p;
4571      HOST_WIDE_INT minelt = 0;
4572      HOST_WIDE_INT maxelt = 0;
4573
4574      /* Vectors are like arrays, but the domain is stored via an array
4575	 type indirectly.  */
4576      if (TREE_CODE (type) == VECTOR_TYPE)
4577	{
4578	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4579	     the same field as TYPE_DOMAIN, we are not guaranteed that
4580	     it always will.  */
4581	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4582	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4583	}
4584
4585      const_bounds_p = (TYPE_MIN_VALUE (domain)
4586			&& TYPE_MAX_VALUE (domain)
4587			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
4588			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
4589
4590      /* If we have constant bounds for the range of the type, get them.  */
4591      if (const_bounds_p)
4592	{
4593	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4594	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4595	}
4596
4597      /* If the constructor has fewer elements than the array,
4598         clear the whole array first.  Similarly if this is a
4599         static constructor of a non-BLKmode object.  */
4600      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4601	need_to_clear = 1;
4602      else
4603	{
4604	  HOST_WIDE_INT count = 0, zero_count = 0;
4605	  need_to_clear = ! const_bounds_p;
4606
4607	  /* This loop is a more accurate version of the loop in
4608	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4609	     It is also needed to check for missing elements.  */
4610	  for (elt = CONSTRUCTOR_ELTS (exp);
4611	       elt != NULL_TREE && ! need_to_clear;
4612	       elt = TREE_CHAIN (elt))
4613	    {
4614	      tree index = TREE_PURPOSE (elt);
4615	      HOST_WIDE_INT this_node_count;
4616
4617	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4618		{
4619		  tree lo_index = TREE_OPERAND (index, 0);
4620		  tree hi_index = TREE_OPERAND (index, 1);
4621
4622		  if (! host_integerp (lo_index, 1)
4623		      || ! host_integerp (hi_index, 1))
4624		    {
4625		      need_to_clear = 1;
4626		      break;
4627		    }
4628
4629		  this_node_count = (tree_low_cst (hi_index, 1)
4630				     - tree_low_cst (lo_index, 1) + 1);
4631		}
4632	      else
4633		this_node_count = 1;
4634
4635	      count += this_node_count;
4636	      if (mostly_zeros_p (TREE_VALUE (elt)))
4637		zero_count += this_node_count;
4638	    }
4639
4640	  /* Clear the entire array first if there are any missing elements,
4641	     or if the incidence of zero elements is >= 75%.  */
4642	  if (! need_to_clear
4643	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4644	    need_to_clear = 1;
4645	}
4646
4647      if (need_to_clear && size > 0)
4648	{
4649	  if (! cleared)
4650	    {
4651	      if (REG_P (target))
4652		emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
4653	      else
4654		clear_storage (target, GEN_INT (size));
4655	    }
4656	  cleared = 1;
4657	}
4658      else if (REG_P (target))
4659	/* Inform later passes that the old value is dead.  */
4660	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4661
4662      /* Store each element of the constructor into
4663	 the corresponding element of TARGET, determined
4664	 by counting the elements.  */
4665      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4666	   elt;
4667	   elt = TREE_CHAIN (elt), i++)
4668	{
4669	  enum machine_mode mode;
4670	  HOST_WIDE_INT bitsize;
4671	  HOST_WIDE_INT bitpos;
4672	  int unsignedp;
4673	  tree value = TREE_VALUE (elt);
4674	  tree index = TREE_PURPOSE (elt);
4675	  rtx xtarget = target;
4676
4677	  if (cleared && is_zeros_p (value))
4678	    continue;
4679
4680	  unsignedp = TREE_UNSIGNED (elttype);
4681	  mode = TYPE_MODE (elttype);
4682	  if (mode == BLKmode)
4683	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4684		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
4685		       : -1);
4686	  else
4687	    bitsize = GET_MODE_BITSIZE (mode);
4688
4689	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4690	    {
4691	      tree lo_index = TREE_OPERAND (index, 0);
4692	      tree hi_index = TREE_OPERAND (index, 1);
4693	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4694	      struct nesting *loop;
4695	      HOST_WIDE_INT lo, hi, count;
4696	      tree position;
4697
4698	      /* If the range is constant and "small", unroll the loop.  */
4699	      if (const_bounds_p
4700		  && host_integerp (lo_index, 0)
4701		  && host_integerp (hi_index, 0)
4702		  && (lo = tree_low_cst (lo_index, 0),
4703		      hi = tree_low_cst (hi_index, 0),
4704		      count = hi - lo + 1,
4705		      (GET_CODE (target) != MEM
4706		       || count <= 2
4707		       || (host_integerp (TYPE_SIZE (elttype), 1)
4708			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4709			       <= 40 * 8)))))
4710		{
4711		  lo -= minelt;  hi -= minelt;
4712		  for (; lo <= hi; lo++)
4713		    {
4714		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4715
4716		      if (GET_CODE (target) == MEM
4717			  && !MEM_KEEP_ALIAS_SET_P (target)
4718			  && TREE_CODE (type) == ARRAY_TYPE
4719			  && TYPE_NONALIASED_COMPONENT (type))
4720			{
4721			  target = copy_rtx (target);
4722			  MEM_KEEP_ALIAS_SET_P (target) = 1;
4723			}
4724
4725		      store_constructor_field
4726			(target, bitsize, bitpos, mode, value, type, cleared,
4727			 get_alias_set (elttype));
4728		    }
4729		}
4730	      else
4731		{
4732		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4733		  loop_top = gen_label_rtx ();
4734		  loop_end = gen_label_rtx ();
4735
4736		  unsignedp = TREE_UNSIGNED (domain);
4737
4738		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4739
4740		  index_r
4741		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4742						 &unsignedp, 0));
4743		  SET_DECL_RTL (index, index_r);
4744		  if (TREE_CODE (value) == SAVE_EXPR
4745		      && SAVE_EXPR_RTL (value) == 0)
4746		    {
4747		      /* Make sure value gets expanded once before the
4748                         loop.  */
4749		      expand_expr (value, const0_rtx, VOIDmode, 0);
4750		      emit_queue ();
4751		    }
4752		  store_expr (lo_index, index_r, 0);
4753		  loop = expand_start_loop (0);
4754
4755		  /* Assign value to element index.  */
4756		  position
4757		    = convert (ssizetype,
4758			       fold (build (MINUS_EXPR, TREE_TYPE (index),
4759					    index, TYPE_MIN_VALUE (domain))));
4760		  position = size_binop (MULT_EXPR, position,
4761					 convert (ssizetype,
4762						  TYPE_SIZE_UNIT (elttype)));
4763
4764		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4765		  xtarget = offset_address (target, pos_rtx,
4766					    highest_pow2_factor (position));
4767		  xtarget = adjust_address (xtarget, mode, 0);
4768		  if (TREE_CODE (value) == CONSTRUCTOR)
4769		    store_constructor (value, xtarget, cleared,
4770				       bitsize / BITS_PER_UNIT);
4771		  else
4772		    store_expr (value, xtarget, 0);
4773
4774		  expand_exit_loop_if_false (loop,
4775					     build (LT_EXPR, integer_type_node,
4776						    index, hi_index));
4777
4778		  expand_increment (build (PREINCREMENT_EXPR,
4779					   TREE_TYPE (index),
4780					   index, integer_one_node), 0, 0);
4781		  expand_end_loop ();
4782		  emit_label (loop_end);
4783		}
4784	    }
4785	  else if ((index != 0 && ! host_integerp (index, 0))
4786		   || ! host_integerp (TYPE_SIZE (elttype), 1))
4787	    {
4788	      tree position;
4789
4790	      if (index == 0)
4791		index = ssize_int (1);
4792
4793	      if (minelt)
4794		index = convert (ssizetype,
4795				 fold (build (MINUS_EXPR, index,
4796					      TYPE_MIN_VALUE (domain))));
4797
4798	      position = size_binop (MULT_EXPR, index,
4799				     convert (ssizetype,
4800					      TYPE_SIZE_UNIT (elttype)));
4801	      xtarget = offset_address (target,
4802					expand_expr (position, 0, VOIDmode, 0),
4803					highest_pow2_factor (position));
4804	      xtarget = adjust_address (xtarget, mode, 0);
4805	      store_expr (value, xtarget, 0);
4806	    }
4807	  else
4808	    {
4809	      if (index != 0)
4810		bitpos = ((tree_low_cst (index, 0) - minelt)
4811			  * tree_low_cst (TYPE_SIZE (elttype), 1));
4812	      else
4813		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4814
4815	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4816		  && TREE_CODE (type) == ARRAY_TYPE
4817		  && TYPE_NONALIASED_COMPONENT (type))
4818		{
4819		  target = copy_rtx (target);
4820		  MEM_KEEP_ALIAS_SET_P (target) = 1;
4821		}
4822
4823	      store_constructor_field (target, bitsize, bitpos, mode, value,
4824				       type, cleared, get_alias_set (elttype));
4825
4826	    }
4827	}
4828    }
4829
4830  /* Set constructor assignments.  */
4831  else if (TREE_CODE (type) == SET_TYPE)
4832    {
4833      tree elt = CONSTRUCTOR_ELTS (exp);
4834      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4835      tree domain = TYPE_DOMAIN (type);
4836      tree domain_min, domain_max, bitlength;
4837
4838      /* The default implementation strategy is to extract the constant
4839	 parts of the constructor, use that to initialize the target,
4840	 and then "or" in whatever non-constant ranges we need in addition.
4841
4842	 If a large set is all zero or all ones, it is
4843	 probably better to set it using memset (if available) or bzero.
4844	 Also, if a large set has just a single range, it may be
4845	 better to first clear the whole set (using bzero/memset)
4846	 and then set the bits we want.  */
4847
4848      /* Check for all zeros.  */
4849      if (elt == NULL_TREE && size > 0)
4850	{
4851	  if (!cleared)
4852	    clear_storage (target, GEN_INT (size));
4853	  return;
4854	}
4855
4856      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4857      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4858      bitlength = size_binop (PLUS_EXPR,
4859			      size_diffop (domain_max, domain_min),
4860			      ssize_int (1));
4861
4862      nbits = tree_low_cst (bitlength, 1);
4863
4864      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4865	 are "complicated" (more than one range), initialize (the
4866	 constant parts) by copying from a constant.  */
4867      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4868	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4869	{
4870	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4871	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4872	  char *bit_buffer = (char *) alloca (nbits);
4873	  HOST_WIDE_INT word = 0;
4874	  unsigned int bit_pos = 0;
4875	  unsigned int ibit = 0;
4876	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
4877
4878	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4879	  for (;;)
4880	    {
4881	      if (bit_buffer[ibit])
4882		{
4883		  if (BYTES_BIG_ENDIAN)
4884		    word |= (1 << (set_word_size - 1 - bit_pos));
4885		  else
4886		    word |= 1 << bit_pos;
4887		}
4888
4889	      bit_pos++;  ibit++;
4890	      if (bit_pos >= set_word_size || ibit == nbits)
4891		{
4892		  if (word != 0 || ! cleared)
4893		    {
4894		      rtx datum = GEN_INT (word);
4895		      rtx to_rtx;
4896
4897		      /* The assumption here is that it is safe to use
4898			 XEXP if the set is multi-word, but not if
4899			 it's single-word.  */
4900		      if (GET_CODE (target) == MEM)
4901			to_rtx = adjust_address (target, mode, offset);
4902		      else if (offset == 0)
4903			to_rtx = target;
4904		      else
4905			abort ();
4906		      emit_move_insn (to_rtx, datum);
4907		    }
4908
4909		  if (ibit == nbits)
4910		    break;
4911		  word = 0;
4912		  bit_pos = 0;
4913		  offset += set_word_size / BITS_PER_UNIT;
4914		}
4915	    }
4916	}
4917      else if (!cleared)
4918	/* Don't bother clearing storage if the set is all ones.  */
4919	if (TREE_CHAIN (elt) != NULL_TREE
4920	    || (TREE_PURPOSE (elt) == NULL_TREE
4921		? nbits != 1
4922		: ( ! host_integerp (TREE_VALUE (elt), 0)
4923		   || ! host_integerp (TREE_PURPOSE (elt), 0)
4924		   || (tree_low_cst (TREE_VALUE (elt), 0)
4925		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4926		       != (HOST_WIDE_INT) nbits))))
4927	  clear_storage (target, expr_size (exp));
4928
4929      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4930	{
4931	  /* Start of range of element or NULL.  */
4932	  tree startbit = TREE_PURPOSE (elt);
4933	  /* End of range of element, or element value.  */
4934	  tree endbit   = TREE_VALUE (elt);
4935#ifdef TARGET_MEM_FUNCTIONS
4936	  HOST_WIDE_INT startb, endb;
4937#endif
4938	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4939
4940	  bitlength_rtx = expand_expr (bitlength,
4941				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4942
4943	  /* Handle non-range tuple element like [ expr ].  */
4944	  if (startbit == NULL_TREE)
4945	    {
4946	      startbit = save_expr (endbit);
4947	      endbit = startbit;
4948	    }
4949
4950	  startbit = convert (sizetype, startbit);
4951	  endbit = convert (sizetype, endbit);
4952	  if (! integer_zerop (domain_min))
4953	    {
4954	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4955	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4956	    }
4957	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4958				      EXPAND_CONST_ADDRESS);
4959	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4960				    EXPAND_CONST_ADDRESS);
4961
4962	  if (REG_P (target))
4963	    {
4964	      targetx
4965		= assign_temp
4966		  ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4967					  TYPE_QUAL_CONST)),
4968		   0, 1, 1);
4969	      emit_move_insn (targetx, target);
4970	    }
4971
4972	  else if (GET_CODE (target) == MEM)
4973	    targetx = target;
4974	  else
4975	    abort ();
4976
4977#ifdef TARGET_MEM_FUNCTIONS
4978	  /* Optimization:  If startbit and endbit are
4979	     constants divisible by BITS_PER_UNIT,
4980	     call memset instead.  */
4981	  if (TREE_CODE (startbit) == INTEGER_CST
4982	      && TREE_CODE (endbit) == INTEGER_CST
4983	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4984	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4985	    {
4986	      emit_library_call (memset_libfunc, LCT_NORMAL,
4987				 VOIDmode, 3,
4988				 plus_constant (XEXP (targetx, 0),
4989						startb / BITS_PER_UNIT),
4990				 Pmode,
4991				 constm1_rtx, TYPE_MODE (integer_type_node),
4992				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4993				 TYPE_MODE (sizetype));
4994	    }
4995	  else
4996#endif
4997	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4998			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4999			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5000			       startbit_rtx, TYPE_MODE (sizetype),
5001			       endbit_rtx, TYPE_MODE (sizetype));
5002
5003	  if (REG_P (target))
5004	    emit_move_insn (target, targetx);
5005	}
5006    }
5007
5008  else
5009    abort ();
5010}
5011
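/* Stand-alone sketch (assumes a 32-bit word for illustration; not GCC
   code): the SET_TYPE case above packs the constant bits of the
   constructor into target-sized words, putting bit 0 at the most
   significant position when BYTES_BIG_ENDIAN.  The same packing for a
   single word looks like this:  */
#if 0
static unsigned int
pack_set_word_sketch (const char *bit_buffer, unsigned int nbits,
		      int big_endian)
{
  unsigned int word = 0, bit_pos;
  const unsigned int set_word_size = 32;	/* hypothetical word width */

  for (bit_pos = 0; bit_pos < set_word_size && bit_pos < nbits; bit_pos++)
    if (bit_buffer[bit_pos])
      word |= big_endian
	      ? 1u << (set_word_size - 1 - bit_pos)
	      : 1u << bit_pos;

  return word;
}
#endif
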
5012/* Store the value of EXP (an expression tree)
5013   into a subfield of TARGET which has mode MODE and occupies
5014   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5015   If MODE is VOIDmode, it means that we are storing into a bit-field.
5016
5017   If VALUE_MODE is VOIDmode, return nothing in particular.
5018   UNSIGNEDP is not used in this case.
5019
5020   Otherwise, return an rtx for the value stored.  This rtx
5021   has mode VALUE_MODE if that is convenient to do.
5022   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5023
5024   TYPE is the type of the underlying object,
5025
5026   ALIAS_SET is the alias set for the destination.  This value will
5027   (in general) be different from that for TARGET, since TARGET is a
5028   reference to the containing structure.  */
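
/* Illustrative example (added annotation, not from the original sources):
   for an assignment to a C bit-field member, say
     struct { int a : 3; int b : 5; } s;  s.b = x;
   the caller would typically pass MODE == VOIDmode, BITSIZE == 5 and, on a
   typical little-endian layout, BITPOS == 3, so the value is inserted with
   store_bit_field below.  */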
5029
5030static rtx
5031store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5032	     alias_set)
5033     rtx target;
5034     HOST_WIDE_INT bitsize;
5035     HOST_WIDE_INT bitpos;
5036     enum machine_mode mode;
5037     tree exp;
5038     enum machine_mode value_mode;
5039     int unsignedp;
5040     tree type;
5041     int alias_set;
5042{
5043  HOST_WIDE_INT width_mask = 0;
5044
5045  if (TREE_CODE (exp) == ERROR_MARK)
5046    return const0_rtx;
5047
5048  /* If we have nothing to store, do nothing unless the expression has
5049     side-effects.  */
5050  if (bitsize == 0)
5051    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5052  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5053    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5054
5055  /* If we are storing into an unaligned field of an aligned union that is
5056     in a register, we may have the mode of TARGET being an integer mode but
5057     MODE == BLKmode.  In that case, get an aligned object whose size and
5058     alignment are the same as TARGET and store TARGET into it (we can avoid
5059     the store if the field being stored is the entire width of TARGET).  Then
5060     call ourselves recursively to store the field into a BLKmode version of
5061     that object.  Finally, load from the object into TARGET.  This is not
5062     very efficient in general, but should only be slightly more expensive
5063     than the otherwise-required unaligned accesses.  Perhaps this can be
5064     cleaned up later.  */
5065
5066  if (mode == BLKmode
5067      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5068    {
5069      rtx object
5070	= assign_temp
5071	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5072	   0, 1, 1);
5073      rtx blk_object = adjust_address (object, BLKmode, 0);
5074
5075      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5076	emit_move_insn (object, target);
5077
5078      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5079		   alias_set);
5080
5081      emit_move_insn (target, object);
5082
5083      /* We want to return the BLKmode version of the data.  */
5084      return blk_object;
5085    }
5086
5087  if (GET_CODE (target) == CONCAT)
5088    {
5089      /* We're storing into a struct containing a single __complex.  */
5090
5091      if (bitpos != 0)
5092	abort ();
5093      return store_expr (exp, target, 0);
5094    }
5095
5096  /* If the structure is in a register or if the component
5097     is a bit field, we cannot use addressing to access it.
5098     Use bit-field techniques or SUBREG to store in it.  */
5099
5100  if (mode == VOIDmode
5101      || (mode != BLKmode && ! direct_store[(int) mode]
5102	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5103	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5104      || GET_CODE (target) == REG
5105      || GET_CODE (target) == SUBREG
5106      /* If the field isn't aligned enough to store as an ordinary memref,
5107	 store it as a bit field.  */
5108      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5109	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5110	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5111      /* If the RHS and field are a constant size and the size of the
5112	 RHS isn't the same size as the bitfield, we must use bitfield
5113	 operations.  */
5114      || (bitsize >= 0
5115	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5116	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5117    {
5118      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5119
5120      /* If BITSIZE is narrower than the size of the type of EXP
5121	 we will be narrowing TEMP.  Normally, what's wanted are the
5122	 low-order bits.  However, if EXP's type is a record and this is a
5123	 big-endian machine, we want the upper BITSIZE bits.  */
5124      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5125	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5126	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5127	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5128			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5129				       - bitsize),
5130			     temp, 1);
5131
5132      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5133	 MODE.  */
5134      if (mode != VOIDmode && mode != BLKmode
5135	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5136	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5137
5138      /* If the modes of TARGET and TEMP are both BLKmode, both
5139	 must be in memory and BITPOS must be aligned on a byte
5140	 boundary.  If so, we simply do a block copy.  */
5141      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5142	{
5143	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5144	      || bitpos % BITS_PER_UNIT != 0)
5145	    abort ();
5146
5147	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5148	  emit_block_move (target, temp,
5149			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5150				    / BITS_PER_UNIT));
5151
5152	  return value_mode == VOIDmode ? const0_rtx : target;
5153	}
5154
5155      /* Store the value in the bitfield.  */
5156      store_bit_field (target, bitsize, bitpos, mode, temp,
5157		       int_size_in_bytes (type));
5158
5159      if (value_mode != VOIDmode)
5160	{
5161	  /* The caller wants an rtx for the value.
5162	     If possible, avoid refetching from the bitfield itself.  */
5163	  if (width_mask != 0
5164	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5165	    {
5166	      tree count;
5167	      enum machine_mode tmode;
5168
5169	      tmode = GET_MODE (temp);
5170	      if (tmode == VOIDmode)
5171		tmode = value_mode;
5172
5173	      if (unsignedp)
5174		return expand_and (tmode, temp,
5175				   GEN_INT (trunc_int_for_mode (width_mask,
5176								tmode)),
5177				   NULL_RTX);
5178
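	      /* For a signed value, sign-extend the low BITSIZE bits within
		 TMODE by shifting them up to the top of the mode and back
		 down again with an arithmetic right shift.  */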
5179	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5180	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5181	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5182	    }
5183
5184	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5185				    NULL_RTX, value_mode, VOIDmode,
5186				    int_size_in_bytes (type));
5187	}
5188      return const0_rtx;
5189    }
5190  else
5191    {
5192      rtx addr = XEXP (target, 0);
5193      rtx to_rtx = target;
5194
5195      /* If a value is wanted, it must be the lhs;
5196	 so make the address stable for multiple use.  */
5197
5198      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5199	  && ! CONSTANT_ADDRESS_P (addr)
5200	  /* A frame-pointer reference is already stable.  */
5201	  && ! (GET_CODE (addr) == PLUS
5202		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5203		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5204		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5205	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5206
5207      /* Now build a reference to just the desired component.  */
5208
5209      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5210
5211      if (to_rtx == target)
5212	to_rtx = copy_rtx (to_rtx);
5213
5214      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5215      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5216	set_mem_alias_set (to_rtx, alias_set);
5217
5218      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5219    }
5220}
5221
5222/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5223   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5224   codes and find the ultimate containing object, which we return.
5225
5226   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5227   bit position, and *PUNSIGNEDP to the signedness of the field.
5228   If the position of the field is variable, we store a tree
5229   giving the variable offset (in units) in *POFFSET.
5230   This offset is in addition to the bit position.
5231   If the position is not variable, we store 0 in *POFFSET.
5232
5233   If any of the extraction expressions is volatile,
5234   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5235
5236   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5237   is a mode that can be used to access the field.  In that case, *PBITSIZE
5238   is redundant.
5239
5240   If the field describes a variable-sized object, *PMODE is set to
5241   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5242   this case, but the address of the object can be found.  */
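
/* Illustrative example (added annotation): for a COMPONENT_REF such as
   S.F, where F is a non-bit-field member laid out at byte 4 of S, this
   returns the tree for S with *PBITPOS == 4 * BITS_PER_UNIT, *POFFSET == 0,
   *PBITSIZE equal to the size of F in bits and *PMODE == DECL_MODE (F).
   A variable array index, as in S.A[I].F, ends up in *POFFSET instead.  */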
5243
5244tree
5245get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5246		     punsignedp, pvolatilep)
5247     tree exp;
5248     HOST_WIDE_INT *pbitsize;
5249     HOST_WIDE_INT *pbitpos;
5250     tree *poffset;
5251     enum machine_mode *pmode;
5252     int *punsignedp;
5253     int *pvolatilep;
5254{
5255  tree size_tree = 0;
5256  enum machine_mode mode = VOIDmode;
5257  tree offset = size_zero_node;
5258  tree bit_offset = bitsize_zero_node;
5259  tree placeholder_ptr = 0;
5260  tree tem;
5261
5262  /* First get the mode, signedness, and size.  We do this from just the
5263     outermost expression.  */
5264  if (TREE_CODE (exp) == COMPONENT_REF)
5265    {
5266      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5267      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5268	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5269
5270      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5271    }
5272  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5273    {
5274      size_tree = TREE_OPERAND (exp, 1);
5275      *punsignedp = TREE_UNSIGNED (exp);
5276    }
5277  else
5278    {
5279      mode = TYPE_MODE (TREE_TYPE (exp));
5280      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5281
5282      if (mode == BLKmode)
5283	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5284      else
5285	*pbitsize = GET_MODE_BITSIZE (mode);
5286    }
5287
5288  if (size_tree != 0)
5289    {
5290      if (! host_integerp (size_tree, 1))
5291	mode = BLKmode, *pbitsize = -1;
5292      else
5293	*pbitsize = tree_low_cst (size_tree, 1);
5294    }
5295
5296  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5297     and find the ultimate containing object.  */
5298  while (1)
5299    {
5300      if (TREE_CODE (exp) == BIT_FIELD_REF)
5301	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5302      else if (TREE_CODE (exp) == COMPONENT_REF)
5303	{
5304	  tree field = TREE_OPERAND (exp, 1);
5305	  tree this_offset = DECL_FIELD_OFFSET (field);
5306
5307	  /* If this field hasn't been filled in yet, don't go
5308	     past it.  This should only happen when folding expressions
5309	     made during type construction.  */
5310	  if (this_offset == 0)
5311	    break;
5312	  else if (! TREE_CONSTANT (this_offset)
5313		   && contains_placeholder_p (this_offset))
5314	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5315
5316	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5317	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5318				   DECL_FIELD_BIT_OFFSET (field));
5319
5320	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5321	}
5322
5323      else if (TREE_CODE (exp) == ARRAY_REF
5324	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5325	{
5326	  tree index = TREE_OPERAND (exp, 1);
5327	  tree array = TREE_OPERAND (exp, 0);
5328	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5329	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5330	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5331
5332	  /* We assume all arrays have sizes that are a multiple of a byte.
5333	     First subtract the lower bound, if any, in the type of the
5334	     index, then convert to sizetype and multiply by the size of the
5335	     array element.  */
5336	  if (low_bound != 0 && ! integer_zerop (low_bound))
5337	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5338				 index, low_bound));
5339
5340	  /* If the index has a self-referential type, pass it to a
5341	     WITH_RECORD_EXPR; if the component size is self-referential,
5342	     pass our component to one.  */
5343	  if (! TREE_CONSTANT (index)
5344	      && contains_placeholder_p (index))
5345	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5346	  if (! TREE_CONSTANT (unit_size)
5347	      && contains_placeholder_p (unit_size))
5348	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5349
5350	  offset = size_binop (PLUS_EXPR, offset,
5351			       size_binop (MULT_EXPR,
5352					   convert (sizetype, index),
5353					   unit_size));
5354	}
5355
5356      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5357	{
5358	  tree new = find_placeholder (exp, &placeholder_ptr);
5359
5360	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5361	     We might have been called from tree optimization where we
5362	     haven't set up an object yet.  */
5363	  if (new == 0)
5364	    break;
5365	  else
5366	    exp = new;
5367
5368	  continue;
5369	}
5370      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5371	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5372	       && ! ((TREE_CODE (exp) == NOP_EXPR
5373		      || TREE_CODE (exp) == CONVERT_EXPR)
5374		     && (TYPE_MODE (TREE_TYPE (exp))
5375			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5376	break;
5377
5378      /* If any reference in the chain is volatile, the effect is volatile.  */
5379      if (TREE_THIS_VOLATILE (exp))
5380	*pvolatilep = 1;
5381
5382      exp = TREE_OPERAND (exp, 0);
5383    }
5384
5385  /* If OFFSET is constant, see if we can return the whole thing as a
5386     constant bit position.  Otherwise, split it up.  */
5387  if (host_integerp (offset, 0)
5388      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5389				 bitsize_unit_node))
5390      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5391      && host_integerp (tem, 0))
5392    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5393  else
5394    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5395
5396  *pmode = mode;
5397  return exp;
5398}
5399
5400/* Return 1 if T is an expression that get_inner_reference handles.  */
5401
5402int
5403handled_component_p (t)
5404     tree t;
5405{
5406  switch (TREE_CODE (t))
5407    {
5408    case BIT_FIELD_REF:
5409    case COMPONENT_REF:
5410    case ARRAY_REF:
5411    case ARRAY_RANGE_REF:
5412    case NON_LVALUE_EXPR:
5413    case VIEW_CONVERT_EXPR:
5414      return 1;
5415
5416    case NOP_EXPR:
5417    case CONVERT_EXPR:
5418      return (TYPE_MODE (TREE_TYPE (t))
5419	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5420
5421    default:
5422      return 0;
5423    }
5424}
5425
5426/* Given an rtx VALUE that may contain additions and multiplications, return
5427   an equivalent value that just refers to a register, memory, or constant.
5428   This is done by generating instructions to perform the arithmetic and
5429   returning a pseudo-register containing the value.
5430
5431   The returned value may be a REG, SUBREG, MEM or constant.  */
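
/* Illustrative example (added annotation): given VALUE of the form
   (plus:SI (reg:SI 100) (const_int 4)), this routine emits an add via
   expand_binop and returns a pseudo register holding the sum, whereas a
   bare (reg:SI 100) or a constant is returned unchanged.  */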
5432
5433rtx
5434force_operand (value, target)
5435     rtx value, target;
5436{
5437  optab binoptab = 0;
5438  /* Use a temporary to force order of execution of calls to
5439     `force_operand'.  */
5440  rtx tmp;
5441  rtx op2;
5442  /* Use subtarget as the target for operand 0 of a binary operation.  */
5443  rtx subtarget = get_subtarget (target);
5444
5445  /* Check for a PIC address load.  */
5446  if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5447      && XEXP (value, 0) == pic_offset_table_rtx
5448      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5449	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5450	  || GET_CODE (XEXP (value, 1)) == CONST))
5451    {
5452      if (!subtarget)
5453	subtarget = gen_reg_rtx (GET_MODE (value));
5454      emit_move_insn (subtarget, value);
5455      return subtarget;
5456    }
5457
5458  if (GET_CODE (value) == PLUS)
5459    binoptab = add_optab;
5460  else if (GET_CODE (value) == MINUS)
5461    binoptab = sub_optab;
5462  else if (GET_CODE (value) == MULT)
5463    {
5464      op2 = XEXP (value, 1);
5465      if (!CONSTANT_P (op2)
5466	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5467	subtarget = 0;
5468      tmp = force_operand (XEXP (value, 0), subtarget);
5469      return expand_mult (GET_MODE (value), tmp,
5470			  force_operand (op2, NULL_RTX),
5471			  target, 1);
5472    }
5473
5474  if (binoptab)
5475    {
5476      op2 = XEXP (value, 1);
5477      if (!CONSTANT_P (op2)
5478	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5479	subtarget = 0;
5480      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5481	{
5482	  binoptab = add_optab;
5483	  op2 = negate_rtx (GET_MODE (value), op2);
5484	}
5485
5486      /* Check for an addition with OP2 a constant integer and our first
5487	 operand a PLUS of a virtual register and something else.  In that
5488	 case, we want to emit the sum of the virtual register and the
5489	 constant first and then add the other value.  This allows virtual
5490	 register instantiation to simply modify the constant rather than
5491	 creating another one around this addition.  */
5492      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5493	  && GET_CODE (XEXP (value, 0)) == PLUS
5494	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5495	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5496	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5497	{
5498	  rtx temp = expand_binop (GET_MODE (value), binoptab,
5499				   XEXP (XEXP (value, 0), 0), op2,
5500				   subtarget, 0, OPTAB_LIB_WIDEN);
5501	  return expand_binop (GET_MODE (value), binoptab, temp,
5502			       force_operand (XEXP (XEXP (value, 0), 1), 0),
5503			       target, 0, OPTAB_LIB_WIDEN);
5504	}
5505
5506      tmp = force_operand (XEXP (value, 0), subtarget);
5507      return expand_binop (GET_MODE (value), binoptab, tmp,
5508			   force_operand (op2, NULL_RTX),
5509			   target, 0, OPTAB_LIB_WIDEN);
5510      /* We give UNSIGNEDP = 0 to expand_binop
5511	 because the only operations we are expanding here are signed ones.  */
5512    }
5513
5514#ifdef INSN_SCHEDULING
5515  /* On machines that have insn scheduling, we want all memory references to be
5516     explicit, so we need to deal with such paradoxical SUBREGs.  */
5517  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5518      && (GET_MODE_SIZE (GET_MODE (value))
5519	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5520    value
5521      = simplify_gen_subreg (GET_MODE (value),
5522			     force_reg (GET_MODE (SUBREG_REG (value)),
5523					force_operand (SUBREG_REG (value),
5524						       NULL_RTX)),
5525			     GET_MODE (SUBREG_REG (value)),
5526			     SUBREG_BYTE (value));
5527#endif
5528
5529  return value;
5530}
5531
5532/* Subroutine of expand_expr: return nonzero iff there is no way that
5533   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5534   call is going to be used to determine whether we need a temporary
5535   for EXP, as opposed to a recursive call to this function.
5536
5537   It is always safe for this routine to return zero since it merely
5538   searches for optimization opportunities.  */
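
/* For instance (added annotation): the CONSTRUCTOR case of expand_expr
   below uses safe_from_p (target, exp, 1) to decide whether the constructor
   can be stored directly into TARGET or whether a temporary must be used
   because some subexpression of EXP might read TARGET.  */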
5539
5540int
5541safe_from_p (x, exp, top_p)
5542     rtx x;
5543     tree exp;
5544     int top_p;
5545{
5546  rtx exp_rtl = 0;
5547  int i, nops;
5548  static tree save_expr_list;
5549
5550  if (x == 0
5551      /* If EXP has varying size, we MUST use a target since we currently
5552	 have no way of allocating temporaries of variable size
5553	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5554	 So we assume here that something at a higher level has prevented a
5555	 clash.  This is somewhat bogus, but the best we can do.  Only
5556	 do this when X is BLKmode and when we are at the top level.  */
5557      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5558	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5559	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5560	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5561	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5562	      != INTEGER_CST)
5563	  && GET_MODE (x) == BLKmode)
5564      /* If X is in the outgoing argument area, it is always safe.  */
5565      || (GET_CODE (x) == MEM
5566	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5567	      || (GET_CODE (XEXP (x, 0)) == PLUS
5568		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5569    return 1;
5570
5571  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5572     find the underlying pseudo.  */
5573  if (GET_CODE (x) == SUBREG)
5574    {
5575      x = SUBREG_REG (x);
5576      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5577	return 0;
5578    }
5579
5580  /* A SAVE_EXPR might appear many times in the expression passed to the
5581     top-level safe_from_p call, and if it has a complex subexpression,
5582     examining it multiple times could result in a combinatorial explosion.
5583     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5584     with optimization took about 28 minutes to compile -- even though it was
5585     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
5586     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
5587     we have processed.  Note that the only test of top_p was above.  */
5588
5589  if (top_p)
5590    {
5591      int rtn;
5592      tree t;
5593
5594      save_expr_list = 0;
5595
5596      rtn = safe_from_p (x, exp, 0);
5597
5598      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5599	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5600
5601      return rtn;
5602    }
5603
5604  /* Now look at our tree code and possibly recurse.  */
5605  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5606    {
5607    case 'd':
5608      exp_rtl = DECL_RTL_IF_SET (exp);
5609      break;
5610
5611    case 'c':
5612      return 1;
5613
5614    case 'x':
5615      if (TREE_CODE (exp) == TREE_LIST)
5616	return ((TREE_VALUE (exp) == 0
5617		 || safe_from_p (x, TREE_VALUE (exp), 0))
5618		&& (TREE_CHAIN (exp) == 0
5619		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5620      else if (TREE_CODE (exp) == ERROR_MARK)
5621	return 1;	/* An already-visited SAVE_EXPR? */
5622      else
5623	return 0;
5624
5625    case '1':
5626      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5627
5628    case '2':
5629    case '<':
5630      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5631	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5632
5633    case 'e':
5634    case 'r':
5635      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5636	 the expression.  If it is set, we conflict iff we are that rtx or
5637	 both are in memory.  Otherwise, we check all operands of the
5638	 expression recursively.  */
5639
5640      switch (TREE_CODE (exp))
5641	{
5642	case ADDR_EXPR:
5643	  /* If the operand is static or we are static, we can't conflict.
5644	     Likewise if we don't conflict with the operand at all.  */
5645	  if (staticp (TREE_OPERAND (exp, 0))
5646	      || TREE_STATIC (exp)
5647	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5648	    return 1;
5649
5650	  /* Otherwise, the only way this can conflict is if we are taking
5651	     the address of a DECL whose address is part of X, which is
5652	     very rare.  */
5653	  exp = TREE_OPERAND (exp, 0);
5654	  if (DECL_P (exp))
5655	    {
5656	      if (!DECL_RTL_SET_P (exp)
5657		  || GET_CODE (DECL_RTL (exp)) != MEM)
5658		return 0;
5659	      else
5660		exp_rtl = XEXP (DECL_RTL (exp), 0);
5661	    }
5662	  break;
5663
5664	case INDIRECT_REF:
5665	  if (GET_CODE (x) == MEM
5666	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5667					get_alias_set (exp)))
5668	    return 0;
5669	  break;
5670
5671	case CALL_EXPR:
5672	  /* Assume that the call will clobber all hard registers and
5673	     all of memory.  */
5674	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5675	      || GET_CODE (x) == MEM)
5676	    return 0;
5677	  break;
5678
5679	case RTL_EXPR:
5680	  /* If a sequence exists, we would have to scan every instruction
5681	     in the sequence to see if it was safe.  This is probably not
5682	     worthwhile.  */
5683	  if (RTL_EXPR_SEQUENCE (exp))
5684	    return 0;
5685
5686	  exp_rtl = RTL_EXPR_RTL (exp);
5687	  break;
5688
5689	case WITH_CLEANUP_EXPR:
5690	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5691	  break;
5692
5693	case CLEANUP_POINT_EXPR:
5694	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5695
5696	case SAVE_EXPR:
5697	  exp_rtl = SAVE_EXPR_RTL (exp);
5698	  if (exp_rtl)
5699	    break;
5700
5701	  /* If we've already scanned this, don't do it again.  Otherwise,
5702	     show we've scanned it and record for clearing the flag if we're
5703	     going on.  */
5704	  if (TREE_PRIVATE (exp))
5705	    return 1;
5706
5707	  TREE_PRIVATE (exp) = 1;
5708	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5709	    {
5710	      TREE_PRIVATE (exp) = 0;
5711	      return 0;
5712	    }
5713
5714	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5715	  return 1;
5716
5717	case BIND_EXPR:
5718	  /* The only operand we look at is operand 1.  The rest aren't
5719	     part of the expression.  */
5720	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5721
5722	case METHOD_CALL_EXPR:
5723	  /* This takes an rtx argument, but shouldn't appear here.  */
5724	  abort ();
5725
5726	default:
5727	  break;
5728	}
5729
5730      /* If we have an rtx, we do not need to scan our operands.  */
5731      if (exp_rtl)
5732	break;
5733
5734      nops = first_rtl_op (TREE_CODE (exp));
5735      for (i = 0; i < nops; i++)
5736	if (TREE_OPERAND (exp, i) != 0
5737	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5738	  return 0;
5739
5740      /* If this is a language-specific tree code, it may require
5741	 special handling.  */
5742      if ((unsigned int) TREE_CODE (exp)
5743	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5744	  && !(*lang_hooks.safe_from_p) (x, exp))
5745	return 0;
5746    }
5747
5748  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5749     with it.  */
5750  if (exp_rtl)
5751    {
5752      if (GET_CODE (exp_rtl) == SUBREG)
5753	{
5754	  exp_rtl = SUBREG_REG (exp_rtl);
5755	  if (GET_CODE (exp_rtl) == REG
5756	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5757	    return 0;
5758	}
5759
5760      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5761	 are memory and they conflict.  */
5762      return ! (rtx_equal_p (x, exp_rtl)
5763		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5764		    && true_dependence (exp_rtl, VOIDmode, x,
5765					rtx_addr_varies_p)));
5766    }
5767
5768  /* If we reach here, it is safe.  */
5769  return 1;
5770}
5771
5772/* Subroutine of expand_expr: return rtx if EXP is a
5773   variable or parameter; else return 0.  */
5774
5775static rtx
5776var_rtx (exp)
5777     tree exp;
5778{
5779  STRIP_NOPS (exp);
5780  switch (TREE_CODE (exp))
5781    {
5782    case PARM_DECL:
5783    case VAR_DECL:
5784      return DECL_RTL (exp);
5785    default:
5786      return 0;
5787    }
5788}
5789
5790#ifdef MAX_INTEGER_COMPUTATION_MODE
5791
5792void
5793check_max_integer_computation_mode (exp)
5794     tree exp;
5795{
5796  enum tree_code code;
5797  enum machine_mode mode;
5798
5799  /* Strip any NOPs that don't change the mode.  */
5800  STRIP_NOPS (exp);
5801  code = TREE_CODE (exp);
5802
5803  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5804  if (code == NOP_EXPR
5805      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5806    return;
5807
5808  /* First check the type of the overall operation.   We need only look at
5809     unary, binary and relational operations.  */
5810  if (TREE_CODE_CLASS (code) == '1'
5811      || TREE_CODE_CLASS (code) == '2'
5812      || TREE_CODE_CLASS (code) == '<')
5813    {
5814      mode = TYPE_MODE (TREE_TYPE (exp));
5815      if (GET_MODE_CLASS (mode) == MODE_INT
5816	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5817	internal_error ("unsupported wide integer operation");
5818    }
5819
5820  /* Check operand of a unary op.  */
5821  if (TREE_CODE_CLASS (code) == '1')
5822    {
5823      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5824      if (GET_MODE_CLASS (mode) == MODE_INT
5825	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5826	internal_error ("unsupported wide integer operation");
5827    }
5828
5829  /* Check operands of a binary/comparison op.  */
5830  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5831    {
5832      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5833      if (GET_MODE_CLASS (mode) == MODE_INT
5834	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5835	internal_error ("unsupported wide integer operation");
5836
5837      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5838      if (GET_MODE_CLASS (mode) == MODE_INT
5839	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5840	internal_error ("unsupported wide integer operation");
5841    }
5842}
5843#endif
5844
5845/* Return the highest power of two that EXP is known to be a multiple of.
5846   This is used in updating alignment of MEMs in array references.  */
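
/* For example (added annotation): for the tree (I * 12) + 32 this returns 4,
   since the product is known to be a multiple of 4 (the largest power of two
   dividing 12), the constant 32 is a multiple of 32, and the sum is therefore
   known to be a multiple of MIN (4, 32) == 4.  */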
5847
5848static HOST_WIDE_INT
5849highest_pow2_factor (exp)
5850     tree exp;
5851{
5852  HOST_WIDE_INT c0, c1;
5853
5854  switch (TREE_CODE (exp))
5855    {
5856    case INTEGER_CST:
5857      /* We can find the lowest bit that's a one.  If the low
5858	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5859	 We need to handle this case since we can find it in a COND_EXPR,
5860	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
5861	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5862	 later ICE.  */
5863      if (TREE_CONSTANT_OVERFLOW (exp))
5864	return BIGGEST_ALIGNMENT;
5865      else
5866	{
5867	  /* Note: tree_low_cst is intentionally not used here;
5868	     we don't care about the upper bits.  */
5869	  c0 = TREE_INT_CST_LOW (exp);
5870	  c0 &= -c0;
5871	  return c0 ? c0 : BIGGEST_ALIGNMENT;
5872	}
5873      break;
5874
5875    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
5876      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5877      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5878      return MIN (c0, c1);
5879
5880    case MULT_EXPR:
5881      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5882      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5883      return c0 * c1;
5884
5885    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
5886    case CEIL_DIV_EXPR:
5887      if (integer_pow2p (TREE_OPERAND (exp, 1))
5888	  && host_integerp (TREE_OPERAND (exp, 1), 1))
5889	{
5890	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5891	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5892	  return MAX (1, c0 / c1);
5893	}
5894      break;
5895
5896    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
5897    case SAVE_EXPR: case WITH_RECORD_EXPR:
5898      return highest_pow2_factor (TREE_OPERAND (exp, 0));
5899
5900    case COMPOUND_EXPR:
5901      return highest_pow2_factor (TREE_OPERAND (exp, 1));
5902
5903    case COND_EXPR:
5904      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5905      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5906      return MIN (c0, c1);
5907
5908    default:
5909      break;
5910    }
5911
5912  return 1;
5913}
5914
5915/* Similar, except that it is known that the expression must be a multiple
5916   of the alignment of TYPE.  */
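
/* For example (added annotation): if TYPE has 4-byte alignment and EXP is
   I * 2, the result is MAX (2, 4) == 4.  */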
5917
5918static HOST_WIDE_INT
5919highest_pow2_factor_for_type (type, exp)
5920     tree type;
5921     tree exp;
5922{
5923  HOST_WIDE_INT type_align, factor;
5924
5925  factor = highest_pow2_factor (exp);
5926  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
5927  return MAX (factor, type_align);
5928}
5929
5930/* Return an object on the placeholder list that matches EXP, a
5931   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
5932   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
5933   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
5934   points to a starting location in the placeholder list (zero meaning the
5935   start of the list); on return, a pointer to the placeholder list entry
5936   at which the object was found is stored through it.  */
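
/* Added annotation: PLACEHOLDER_EXPRs typically arise from self-referential
   types (see tree.def), e.g. record types whose size or field positions
   depend on the object itself; the placeholder stands for that object until
   a WITH_RECORD_EXPR (or this placeholder list) supplies it.  */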
5937
5938tree
5939find_placeholder (exp, plist)
5940     tree exp;
5941     tree *plist;
5942{
5943  tree type = TREE_TYPE (exp);
5944  tree placeholder_expr;
5945
5946  for (placeholder_expr
5947       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5948       placeholder_expr != 0;
5949       placeholder_expr = TREE_CHAIN (placeholder_expr))
5950    {
5951      tree need_type = TYPE_MAIN_VARIANT (type);
5952      tree elt;
5953
5954      /* Find the outermost reference that is of the type we want.  If none,
5955	 see if any object has a type that is a pointer to the type we
5956	 want.  */
5957      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5958	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5959		   || TREE_CODE (elt) == COND_EXPR)
5960		  ? TREE_OPERAND (elt, 1)
5961		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5962		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5963		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5964		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5965		  ? TREE_OPERAND (elt, 0) : 0))
5966	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5967	  {
5968	    if (plist)
5969	      *plist = placeholder_expr;
5970	    return elt;
5971	  }
5972
5973      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5974	   elt
5975	   = ((TREE_CODE (elt) == COMPOUND_EXPR
5976	       || TREE_CODE (elt) == COND_EXPR)
5977	      ? TREE_OPERAND (elt, 1)
5978	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5979		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5980		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5981		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5982	      ? TREE_OPERAND (elt, 0) : 0))
5983	if (POINTER_TYPE_P (TREE_TYPE (elt))
5984	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5985		== need_type))
5986	  {
5987	    if (plist)
5988	      *plist = placeholder_expr;
5989	    return build1 (INDIRECT_REF, need_type, elt);
5990	  }
5991    }
5992
5993  return 0;
5994}
5995
5996/* expand_expr: generate code for computing expression EXP.
5997   An rtx for the computed value is returned.  The value is never null.
5998   In the case of a void EXP, const0_rtx is returned.
5999
6000   The value may be stored in TARGET if TARGET is nonzero.
6001   TARGET is just a suggestion; callers must assume that
6002   the rtx returned may not be the same as TARGET.
6003
6004   If TARGET is CONST0_RTX, it means that the value will be ignored.
6005
6006   If TMODE is not VOIDmode, it suggests generating the
6007   result in mode TMODE.  But this is done only when convenient.
6008   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6009   TMODE is just a suggestion; callers must assume that
6010   the rtx returned may not have mode TMODE.
6011
6012   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6013   probably will not be used.
6014
6015   If MODIFIER is EXPAND_SUM then when EXP is an addition
6016   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6017   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6018   products as above, or REG or MEM, or constant.
6019   Ordinarily in such cases we would output mul or add instructions
6020   and then return a pseudo reg containing the sum.
6021
6022   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6023   it also marks a label as absolutely required (it can't be dead).
6024   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6025   This is used for outputting expressions used in initializers.
6026
6027   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6028   with a constant address even if that address is not normally legitimate.
6029   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
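
/* Illustrative example (added annotation): with MODIFIER == EXPAND_SUM,
   expanding a tree for  a + b * 4  may return an rtx of the form
   (plus (reg A) (mult (reg B) (const_int 4))) instead of emitting the
   multiply and add instructions and returning a single pseudo.  */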
6030
6031rtx
6032expand_expr (exp, target, tmode, modifier)
6033     tree exp;
6034     rtx target;
6035     enum machine_mode tmode;
6036     enum expand_modifier modifier;
6037{
6038  rtx op0, op1, temp;
6039  tree type = TREE_TYPE (exp);
6040  int unsignedp = TREE_UNSIGNED (type);
6041  enum machine_mode mode;
6042  enum tree_code code = TREE_CODE (exp);
6043  optab this_optab;
6044  rtx subtarget, original_target;
6045  int ignore;
6046  tree context;
6047
6048  /* Handle ERROR_MARK before anybody tries to access its type.  */
6049  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6050    {
6051      op0 = CONST0_RTX (tmode);
6052      if (op0 != 0)
6053	return op0;
6054      return const0_rtx;
6055    }
6056
6057  mode = TYPE_MODE (type);
6058  /* Use subtarget as the target for operand 0 of a binary operation.  */
6059  subtarget = get_subtarget (target);
6060  original_target = target;
6061  ignore = (target == const0_rtx
6062	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6063		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6064		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6065		&& TREE_CODE (type) == VOID_TYPE));
6066
6067  /* If we are going to ignore this result, we need only do something
6068     if there is a side-effect somewhere in the expression.  If there
6069     is, short-circuit the most common cases here.  Note that we must
6070     not call expand_expr with anything but const0_rtx in case this
6071     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6072
6073  if (ignore)
6074    {
6075      if (! TREE_SIDE_EFFECTS (exp))
6076	return const0_rtx;
6077
6078      /* Ensure we reference a volatile object even if value is ignored, but
6079	 don't do this if all we are doing is taking its address.  */
6080      if (TREE_THIS_VOLATILE (exp)
6081	  && TREE_CODE (exp) != FUNCTION_DECL
6082	  && mode != VOIDmode && mode != BLKmode
6083	  && modifier != EXPAND_CONST_ADDRESS)
6084	{
6085	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6086	  if (GET_CODE (temp) == MEM)
6087	    temp = copy_to_reg (temp);
6088	  return const0_rtx;
6089	}
6090
6091      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6092	  || code == INDIRECT_REF || code == BUFFER_REF)
6093	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6094			    modifier);
6095
6096      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6097	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6098	{
6099	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6100	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6101	  return const0_rtx;
6102	}
6103      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6104	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6105	/* If the second operand has no side effects, just evaluate
6106	   the first.  */
6107	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6108			    modifier);
6109      else if (code == BIT_FIELD_REF)
6110	{
6111	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6112	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6113	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6114	  return const0_rtx;
6115	}
6116
6117      target = 0;
6118    }
6119
6120#ifdef MAX_INTEGER_COMPUTATION_MODE
6121  /* Only check stuff here if the mode we want is different from the mode
6122     of the expression; if it's the same, check_max_integer_computation_mode
6123     will handle it.  Do we really need to check this stuff at all?  */
6124
6125  if (target
6126      && GET_MODE (target) != mode
6127      && TREE_CODE (exp) != INTEGER_CST
6128      && TREE_CODE (exp) != PARM_DECL
6129      && TREE_CODE (exp) != ARRAY_REF
6130      && TREE_CODE (exp) != ARRAY_RANGE_REF
6131      && TREE_CODE (exp) != COMPONENT_REF
6132      && TREE_CODE (exp) != BIT_FIELD_REF
6133      && TREE_CODE (exp) != INDIRECT_REF
6134      && TREE_CODE (exp) != CALL_EXPR
6135      && TREE_CODE (exp) != VAR_DECL
6136      && TREE_CODE (exp) != RTL_EXPR)
6137    {
6138      enum machine_mode mode = GET_MODE (target);
6139
6140      if (GET_MODE_CLASS (mode) == MODE_INT
6141	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6142	internal_error ("unsupported wide integer operation");
6143    }
6144
6145  if (tmode != mode
6146      && TREE_CODE (exp) != INTEGER_CST
6147      && TREE_CODE (exp) != PARM_DECL
6148      && TREE_CODE (exp) != ARRAY_REF
6149      && TREE_CODE (exp) != ARRAY_RANGE_REF
6150      && TREE_CODE (exp) != COMPONENT_REF
6151      && TREE_CODE (exp) != BIT_FIELD_REF
6152      && TREE_CODE (exp) != INDIRECT_REF
6153      && TREE_CODE (exp) != VAR_DECL
6154      && TREE_CODE (exp) != CALL_EXPR
6155      && TREE_CODE (exp) != RTL_EXPR
6156      && GET_MODE_CLASS (tmode) == MODE_INT
6157      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6158    internal_error ("unsupported wide integer operation");
6159
6160  check_max_integer_computation_mode (exp);
6161#endif
6162
6163  /* If we will do cse, generate all results into pseudo registers
6164     since 1) that allows cse to find more things
6165     and 2) otherwise cse could produce an insn the machine
6166     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6167     MEM: that is much more likely to be most efficient directly into the MEM.  */
6168
6169  if (! cse_not_expected && mode != BLKmode && target
6170      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6171      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6172    target = subtarget;
6173
6174  switch (code)
6175    {
6176    case LABEL_DECL:
6177      {
6178	tree function = decl_function_context (exp);
6179	/* Handle using a label in a containing function.  */
6180	if (function != current_function_decl
6181	    && function != inline_function_decl && function != 0)
6182	  {
6183	    struct function *p = find_function_data (function);
6184	    p->expr->x_forced_labels
6185	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6186				   p->expr->x_forced_labels);
6187	  }
6188	else
6189	  {
6190	    if (modifier == EXPAND_INITIALIZER)
6191	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6192						 label_rtx (exp),
6193						 forced_labels);
6194	  }
6195
6196	temp = gen_rtx_MEM (FUNCTION_MODE,
6197			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6198	if (function != current_function_decl
6199	    && function != inline_function_decl && function != 0)
6200	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6201	return temp;
6202      }
6203
6204    case PARM_DECL:
6205      if (DECL_RTL (exp) == 0)
6206	{
6207	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6208	  return CONST0_RTX (mode);
6209	}
6210
6211      /* ... fall through ...  */
6212
6213    case VAR_DECL:
6214      /* If a static var's type was incomplete when the decl was written,
6215	 but the type is complete now, lay out the decl now.  */
6216      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6217	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6218	{
6219	  rtx value = DECL_RTL_IF_SET (exp);
6220
6221	  layout_decl (exp, 0);
6222
6223	  /* If the RTL was already set, update its mode and memory
6224	     attributes.  */
6225	  if (value != 0)
6226	    {
6227	      PUT_MODE (value, DECL_MODE (exp));
6228	      SET_DECL_RTL (exp, 0);
6229	      set_mem_attributes (value, exp, 1);
6230	      SET_DECL_RTL (exp, value);
6231	    }
6232	}
6233
6234      /* ... fall through ...  */
6235
6236    case FUNCTION_DECL:
6237    case RESULT_DECL:
6238      if (DECL_RTL (exp) == 0)
6239	abort ();
6240
6241      /* Ensure the variable is marked as used even if it doesn't go through
6242	 a parser.  If it hasn't been used yet, write out an external
6243	 definition.  */
6244      if (! TREE_USED (exp))
6245	{
6246	  assemble_external (exp);
6247	  TREE_USED (exp) = 1;
6248	}
6249
6250      /* Show we haven't gotten RTL for this yet.  */
6251      temp = 0;
6252
6253      /* Handle variables inherited from containing functions.  */
6254      context = decl_function_context (exp);
6255
6256      /* We treat inline_function_decl as an alias for the current function
6257	 because that is the inline function whose vars, types, etc.
6258	 are being merged into the current function.
6259	 See expand_inline_function.  */
6260
6261      if (context != 0 && context != current_function_decl
6262	  && context != inline_function_decl
6263	  /* If var is static, we don't need a static chain to access it.  */
6264	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6265		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6266	{
6267	  rtx addr;
6268
6269	  /* Mark as non-local and addressable.  */
6270	  DECL_NONLOCAL (exp) = 1;
6271	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6272	    abort ();
6273	  mark_addressable (exp);
6274	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6275	    abort ();
6276	  addr = XEXP (DECL_RTL (exp), 0);
6277	  if (GET_CODE (addr) == MEM)
6278	    addr
6279	      = replace_equiv_address (addr,
6280				       fix_lexical_addr (XEXP (addr, 0), exp));
6281	  else
6282	    addr = fix_lexical_addr (addr, exp);
6283
6284	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6285	}
6286
6287      /* This is the case of an array whose size is to be determined
6288	 from its initializer, while the initializer is still being parsed.
6289	 See expand_decl.  */
6290
6291      else if (GET_CODE (DECL_RTL (exp)) == MEM
6292	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6293	temp = validize_mem (DECL_RTL (exp));
6294
6295      /* If DECL_RTL is memory, we are in the normal case and either
6296	 the address is not valid or it is not a register and -fforce-addr
6297	 is specified, get the address into a register.  */
6298
6299      else if (GET_CODE (DECL_RTL (exp)) == MEM
6300	       && modifier != EXPAND_CONST_ADDRESS
6301	       && modifier != EXPAND_SUM
6302	       && modifier != EXPAND_INITIALIZER
6303	       && (! memory_address_p (DECL_MODE (exp),
6304				       XEXP (DECL_RTL (exp), 0))
6305		   || (flag_force_addr
6306		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6307	temp = replace_equiv_address (DECL_RTL (exp),
6308				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6309
6310      /* If we got something, return it.  But first, set the alignment
6311	 if the address is a register.  */
6312      if (temp != 0)
6313	{
6314	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6315	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6316
6317	  return temp;
6318	}
6319
6320      /* If the mode of DECL_RTL does not match that of the decl, it
6321	 must be a promoted value.  We return a SUBREG of the wanted mode,
6322	 but mark it so that we know that it was already extended.  */
6323
6324      if (GET_CODE (DECL_RTL (exp)) == REG
6325	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6326	{
6327	  /* Get the signedness used for this variable.  Ensure we get the
6328	     same mode we got when the variable was declared.  */
6329	  if (GET_MODE (DECL_RTL (exp))
6330	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6331			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6332	    abort ();
6333
6334	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6335	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6336	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6337	  return temp;
6338	}
6339
6340      return DECL_RTL (exp);
6341
6342    case INTEGER_CST:
6343      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6344				 TREE_INT_CST_HIGH (exp), mode);
6345
6346      /* ??? If overflow is set, fold will have done an incomplete job,
6347	 which can result in (plus xx (const_int 0)), which can get
6348	 simplified by validate_replace_rtx during virtual register
6349	 instantiation, which can result in unrecognizable insns.
6350	 Avoid this by forcing all overflows into registers.  */
6351      if (TREE_CONSTANT_OVERFLOW (exp)
6352	  && modifier != EXPAND_INITIALIZER)
6353	temp = force_reg (mode, temp);
6354
6355      return temp;
6356
6357    case CONST_DECL:
6358      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6359
6360    case REAL_CST:
6361      /* If optimized, generate immediate CONST_DOUBLE
6362	 which will be turned into memory by reload if necessary.
6363
6364	 We used to force a register so that loop.c could see it.  But
6365	 this does not allow gen_* patterns to perform optimizations with
6366	 the constants.  It also produces two insns in cases like "x = 1.0;".
6367	 On most machines, floating-point constants are not permitted in
6368	 many insns, so we'd end up copying it to a register in any case.
6369
6370	 Now, we do the copying in expand_binop, if appropriate.  */
6371      return immed_real_const (exp);
6372
6373    case COMPLEX_CST:
6374    case STRING_CST:
6375      if (! TREE_CST_RTL (exp))
6376	output_constant_def (exp, 1);
6377
6378      /* TREE_CST_RTL probably contains a constant address.
6379	 On RISC machines where a constant address isn't valid,
6380	 make some insns to get that address into a register.  */
6381      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6382	  && modifier != EXPAND_CONST_ADDRESS
6383	  && modifier != EXPAND_INITIALIZER
6384	  && modifier != EXPAND_SUM
6385	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6386	      || (flag_force_addr
6387		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6388	return replace_equiv_address (TREE_CST_RTL (exp),
6389				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6390      return TREE_CST_RTL (exp);
6391
6392    case EXPR_WITH_FILE_LOCATION:
6393      {
6394	rtx to_return;
6395	const char *saved_input_filename = input_filename;
6396	int saved_lineno = lineno;
6397	input_filename = EXPR_WFL_FILENAME (exp);
6398	lineno = EXPR_WFL_LINENO (exp);
6399	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6400	  emit_line_note (input_filename, lineno);
6401	/* Possibly avoid switching back and forth here.  */
6402	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6403	input_filename = saved_input_filename;
6404	lineno = saved_lineno;
6405	return to_return;
6406      }
6407
6408    case SAVE_EXPR:
6409      context = decl_function_context (exp);
6410
6411      /* If this SAVE_EXPR was at global context, assume we are an
6412	 initialization function and move it into our context.  */
6413      if (context == 0)
6414	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6415
6416      /* We treat inline_function_decl as an alias for the current function
6417	 because that is the inline function whose vars, types, etc.
6418	 are being merged into the current function.
6419	 See expand_inline_function.  */
6420      if (context == current_function_decl || context == inline_function_decl)
6421	context = 0;
6422
6423      /* If this is non-local, handle it.  */
6424      if (context)
6425	{
6426	  /* The following call just exists to abort if the context is
6427	     not of a containing function.  */
6428	  find_function_data (context);
6429
6430	  temp = SAVE_EXPR_RTL (exp);
6431	  if (temp && GET_CODE (temp) == REG)
6432	    {
6433	      put_var_into_stack (exp);
6434	      temp = SAVE_EXPR_RTL (exp);
6435	    }
6436	  if (temp == 0 || GET_CODE (temp) != MEM)
6437	    abort ();
6438	  return
6439	    replace_equiv_address (temp,
6440				   fix_lexical_addr (XEXP (temp, 0), exp));
6441	}
6442      if (SAVE_EXPR_RTL (exp) == 0)
6443	{
6444	  if (mode == VOIDmode)
6445	    temp = const0_rtx;
6446	  else
6447	    temp = assign_temp (build_qualified_type (type,
6448						      (TYPE_QUALS (type)
6449						       | TYPE_QUAL_CONST)),
6450				3, 0, 0);
6451
6452	  SAVE_EXPR_RTL (exp) = temp;
6453	  if (!optimize && GET_CODE (temp) == REG)
6454	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6455						save_expr_regs);
6456
6457	  /* If the mode of TEMP does not match that of the expression, it
6458	     must be a promoted value.  We pass store_expr a SUBREG of the
6459	     wanted mode but mark it so that we know that it was already
6460	     extended.  Note that `unsignedp' was modified above in
6461	     this case.  */
6462
6463	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6464	    {
6465	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6466	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6467	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6468	    }
6469
6470	  if (temp == const0_rtx)
6471	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6472	  else
6473	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6474
6475	  TREE_USED (exp) = 1;
6476	}
6477
6478      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6479	 must be a promoted value.  We return a SUBREG of the wanted mode,
6480	 but mark it so that we know that it was already extended.  */
6481
6482      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6483	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6484	{
6485	  /* Compute the signedness and make the proper SUBREG.  */
6486	  promote_mode (type, mode, &unsignedp, 0);
6487	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6488	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6489	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6490	  return temp;
6491	}
6492
6493      return SAVE_EXPR_RTL (exp);
6494
6495    case UNSAVE_EXPR:
6496      {
6497	rtx temp;
6498	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6499	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6500	return temp;
6501      }
6502
6503    case PLACEHOLDER_EXPR:
6504      {
6505	tree old_list = placeholder_list;
6506	tree placeholder_expr = 0;
6507
6508	exp = find_placeholder (exp, &placeholder_expr);
6509	if (exp == 0)
6510	  abort ();
6511
6512	placeholder_list = TREE_CHAIN (placeholder_expr);
6513	temp = expand_expr (exp, original_target, tmode, modifier);
6514	placeholder_list = old_list;
6515	return temp;
6516      }
6517
6518      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6519      abort ();
6520
6521    case WITH_RECORD_EXPR:
6522      /* Put the object on the placeholder list, expand our first operand,
6523	 and pop the list.  */
6524      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6525				    placeholder_list);
6526      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6527			    modifier);
6528      placeholder_list = TREE_CHAIN (placeholder_list);
6529      return target;
6530
6531    case GOTO_EXPR:
6532      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6533	expand_goto (TREE_OPERAND (exp, 0));
6534      else
6535	expand_computed_goto (TREE_OPERAND (exp, 0));
6536      return const0_rtx;
6537
6538    case EXIT_EXPR:
6539      expand_exit_loop_if_false (NULL,
6540				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6541      return const0_rtx;
6542
6543    case LABELED_BLOCK_EXPR:
6544      if (LABELED_BLOCK_BODY (exp))
6545	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6546      /* Should perhaps use expand_label, but this is simpler and safer.  */
6547      do_pending_stack_adjust ();
6548      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6549      return const0_rtx;
6550
6551    case EXIT_BLOCK_EXPR:
6552      if (EXIT_BLOCK_RETURN (exp))
6553	sorry ("returned value in block_exit_expr");
6554      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6555      return const0_rtx;
6556
6557    case LOOP_EXPR:
6558      push_temp_slots ();
6559      expand_start_loop (1);
6560      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6561      expand_end_loop ();
6562      pop_temp_slots ();
6563
6564      return const0_rtx;
6565
6566    case BIND_EXPR:
6567      {
6568	tree vars = TREE_OPERAND (exp, 0);
6569	int vars_need_expansion = 0;
6570
6571	/* Need to open a binding contour here because
6572	   if there are any cleanups they must be contained here.  */
6573	expand_start_bindings (2);
6574
6575	/* Mark the corresponding BLOCK for output in its proper place.  */
6576	if (TREE_OPERAND (exp, 2) != 0
6577	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6578	  insert_block (TREE_OPERAND (exp, 2));
6579
6580	/* If VARS have not yet been expanded, expand them now.  */
6581	while (vars)
6582	  {
6583	    if (!DECL_RTL_SET_P (vars))
6584	      {
6585		vars_need_expansion = 1;
6586		expand_decl (vars);
6587	      }
6588	    expand_decl_init (vars);
6589	    vars = TREE_CHAIN (vars);
6590	  }
6591
6592	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6593
6594	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6595
6596	return temp;
6597      }
6598
6599    case RTL_EXPR:
6600      if (RTL_EXPR_SEQUENCE (exp))
6601	{
6602	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6603	    abort ();
6604	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6605	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6606	}
6607      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6608      free_temps_for_rtl_expr (exp);
6609      return RTL_EXPR_RTL (exp);
6610
6611    case CONSTRUCTOR:
6612      /* If we don't need the result, just ensure we evaluate any
6613	 subexpressions.  */
6614      if (ignore)
6615	{
6616	  tree elt;
6617
6618	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6619	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6620
6621	  return const0_rtx;
6622	}
6623
6624      /* All elts simple constants => refer to a constant in memory.  But
6625	 if this is a non-BLKmode mode, let it store a field at a time
6626	 since that should make a CONST_INT or CONST_DOUBLE when we
6627	 fold.  Likewise, if we have a target we can use, it is best to
6628	 store directly into the target unless the type is large enough
6629	 that memcpy will be used.  If we are making an initializer and
6630	 all operands are constant, put it in memory as well.  */
6631      else if ((TREE_STATIC (exp)
6632		&& ((mode == BLKmode
6633		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6634		    || TREE_ADDRESSABLE (exp)
6635		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6636			&& (! MOVE_BY_PIECES_P
6637			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6638			     TYPE_ALIGN (type)))
6639			&& ! mostly_zeros_p (exp))))
6640	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6641	{
6642	  rtx constructor = output_constant_def (exp, 1);
6643
6644	  if (modifier != EXPAND_CONST_ADDRESS
6645	      && modifier != EXPAND_INITIALIZER
6646	      && modifier != EXPAND_SUM)
6647	    constructor = validize_mem (constructor);
6648
6649	  return constructor;
6650	}
6651      else
6652	{
6653	  /* Handle calls that pass values in multiple non-contiguous
6654	     locations.  The Irix 6 ABI has examples of this.  */
6655	  if (target == 0 || ! safe_from_p (target, exp, 1)
6656	      || GET_CODE (target) == PARALLEL)
6657	    target
6658	      = assign_temp (build_qualified_type (type,
6659						   (TYPE_QUALS (type)
6660						    | (TREE_READONLY (exp)
6661						       * TYPE_QUAL_CONST))),
6662			     0, TREE_ADDRESSABLE (exp), 1);
6663
6664	  store_constructor (exp, target, 0,
6665			     int_size_in_bytes (TREE_TYPE (exp)));
6666	  return target;
6667	}
6668
6669    case INDIRECT_REF:
6670      {
6671	tree exp1 = TREE_OPERAND (exp, 0);
6672	tree index;
6673	tree string = string_constant (exp1, &index);
6674
6675	/* Try to optimize reads from const strings.  */
6676 	if (string
6677 	    && TREE_CODE (string) == STRING_CST
6678 	    && TREE_CODE (index) == INTEGER_CST
6679	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6680 	    && GET_MODE_CLASS (mode) == MODE_INT
6681 	    && GET_MODE_SIZE (mode) == 1
6682	    && modifier != EXPAND_WRITE)
6683 	  return
6684	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (string)
6685					 [TREE_INT_CST_LOW (index)], mode));
6686
6687	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6688	op0 = memory_address (mode, op0);
6689	temp = gen_rtx_MEM (mode, op0);
6690	set_mem_attributes (temp, exp, 0);
6691
6692	/* If we are writing to this object and its type is a record with
6693	   readonly fields, we must mark it as readonly so it will
6694	   conflict with readonly references to those fields.  */
6695	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6696	  RTX_UNCHANGING_P (temp) = 1;
6697
6698	return temp;
6699      }
6700
6701    case ARRAY_REF:
6702      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6703	abort ();
6704
6705      {
6706	tree array = TREE_OPERAND (exp, 0);
6707	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6708	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6709	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6710	HOST_WIDE_INT i;
6711
6712	/* Optimize the special-case of a zero lower bound.
6713
6714	   We convert the low_bound to sizetype to avoid some problems
6715	   with constant folding.  (E.g. suppose the lower bound is 1,
6716	   and its mode is QI.  Without the conversion,  (ARRAY
6717	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6718	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6719
6720	if (! integer_zerop (low_bound))
6721	  index = size_diffop (index, convert (sizetype, low_bound));
6722
6723	/* Fold an expression like: "foo"[2].
6724	   This is not done in fold so it won't happen inside &.
6725	   Don't fold if this is for wide characters since it's too
6726	   difficult to do correctly and this is a very rare case.  */
6727
6728	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6729	    && TREE_CODE (array) == STRING_CST
6730	    && TREE_CODE (index) == INTEGER_CST
6731	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6732	    && GET_MODE_CLASS (mode) == MODE_INT
6733	    && GET_MODE_SIZE (mode) == 1)
6734	  return
6735	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (array)
6736					 [TREE_INT_CST_LOW (index)], mode));
6737
6738	/* If this is a constant index into a constant array,
6739	   just get the value from the array.  Handle both the cases when
6740	   we have an explicit constructor and when our operand is a variable
6741	   that was declared const.  */
6742
6743	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6744	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6745	    && TREE_CODE (index) == INTEGER_CST
6746	    && 0 > compare_tree_int (index,
6747				     list_length (CONSTRUCTOR_ELTS
6748						  (TREE_OPERAND (exp, 0)))))
6749	  {
6750	    tree elem;
6751
6752	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6753		 i = TREE_INT_CST_LOW (index);
6754		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6755	      ;
6756
6757	    if (elem)
6758	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6759				  modifier);
6760	  }
6761
6762	else if (optimize >= 1
6763		 && modifier != EXPAND_CONST_ADDRESS
6764		 && modifier != EXPAND_INITIALIZER
6765		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6766		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6767		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6768	  {
6769	    if (TREE_CODE (index) == INTEGER_CST)
6770	      {
6771		tree init = DECL_INITIAL (array);
6772
6773		if (TREE_CODE (init) == CONSTRUCTOR)
6774		  {
6775		    tree elem;
6776
6777		    for (elem = CONSTRUCTOR_ELTS (init);
6778			 (elem
6779			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6780			 elem = TREE_CHAIN (elem))
6781		      ;
6782
6783		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6784		      return expand_expr (fold (TREE_VALUE (elem)), target,
6785					  tmode, modifier);
6786		  }
6787		else if (TREE_CODE (init) == STRING_CST
6788			 && 0 > compare_tree_int (index,
6789						  TREE_STRING_LENGTH (init)))
6790		  {
6791		    tree type = TREE_TYPE (TREE_TYPE (init));
6792		    enum machine_mode mode = TYPE_MODE (type);
6793
6794		    if (GET_MODE_CLASS (mode) == MODE_INT
6795			&& GET_MODE_SIZE (mode) == 1)
6796		      return GEN_INT (trunc_int_for_mode
6797				      (TREE_STRING_POINTER (init)
6798				       [TREE_INT_CST_LOW (index)], mode));
6799		  }
6800	      }
6801	  }
6802      }
6803      /* Fall through.  */
6804
6805    case COMPONENT_REF:
6806    case BIT_FIELD_REF:
6807    case ARRAY_RANGE_REF:
6808      /* If the operand is a CONSTRUCTOR, we can just extract the
6809	 appropriate field if it is present.  Don't do this if we have
6810	 already written the data since we want to refer to that copy
6811	 and varasm.c assumes that's what we'll do.  */
6812      if (code == COMPONENT_REF
6813	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6814	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6815	{
6816	  tree elt;
6817
6818	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6819	       elt = TREE_CHAIN (elt))
6820	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6821		/* We can normally use the value of the field in the
6822		   CONSTRUCTOR.  However, if this is a bitfield in
6823		   an integral mode that we can fit in a HOST_WIDE_INT,
6824		   we must mask only the number of bits in the bitfield,
6825		   since this is done implicitly by the constructor.  If
6826		   the bitfield does not meet either of those conditions,
6827		   we can't do this optimization.  */
6828		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6829		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6830			 == MODE_INT)
6831			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6832			    <= HOST_BITS_PER_WIDE_INT))))
6833	      {
6834		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6835		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6836		  {
6837		    HOST_WIDE_INT bitsize
6838		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6839		    enum machine_mode imode
6840		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6841
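		    /* Worked example (a sketch): for a 3-bit unsigned
		       bit-field the mask below is (1 << 3) - 1 == 7; for a
		       3-bit signed bit-field held in a 32-bit IMODE, the
		       value is shifted left by 32 - 3 == 29 bits and then
		       arithmetically right by 29 bits to sign-extend it.  */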
6842		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6843		      {
6844			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6845			op0 = expand_and (imode, op0, op1, target);
6846		      }
6847		    else
6848		      {
6849			tree count
6850			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6851					 0);
6852
6853			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6854					    target, 0);
6855			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6856					    target, 0);
6857		      }
6858		  }
6859
6860		return op0;
6861	      }
6862	}
6863
6864      {
6865	enum machine_mode mode1;
6866	HOST_WIDE_INT bitsize, bitpos;
6867	tree offset;
6868	int volatilep = 0;
6869	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6870					&mode1, &unsignedp, &volatilep);
6871	rtx orig_op0;
6872
6873	/* If we got back the original object, something is wrong.  Perhaps
6874	   we are evaluating an expression too early.  In any event, don't
6875	   infinitely recurse.  */
6876	if (tem == exp)
6877	  abort ();
6878
6879	/* If TEM's type is a union of variable size, pass TARGET to the inner
6880	   computation, since it will need a temporary and TARGET is known
6881	   to be usable as one.  This occurs in unchecked conversion in Ada.  */
6882
6883	orig_op0 = op0
6884	  = expand_expr (tem,
6885			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6886			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6887			      != INTEGER_CST)
6888			  ? target : NULL_RTX),
6889			 VOIDmode,
6890			 (modifier == EXPAND_INITIALIZER
6891			  || modifier == EXPAND_CONST_ADDRESS)
6892			 ? modifier : EXPAND_NORMAL);
6893
6894	/* If this is a constant, put it into a register if it is a
6895	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
6896	if (CONSTANT_P (op0))
6897	  {
6898	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6899	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6900		&& offset == 0)
6901	      op0 = force_reg (mode, op0);
6902	    else
6903	      op0 = validize_mem (force_const_mem (mode, op0));
6904	  }
6905
6906	if (offset != 0)
6907	  {
6908	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6909
6910	    /* If this object is in a register, put it into memory.
6911	       This case can't occur in C, but can in Ada if we have
6912	       unchecked conversion of an expression from a scalar type to
6913	       an array or record type.  */
6914	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6915		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6916	      {
6917		/* If the operand is a SAVE_EXPR, we can deal with this by
6918		   forcing the SAVE_EXPR into memory.  */
6919		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6920		  {
6921		    put_var_into_stack (TREE_OPERAND (exp, 0));
6922		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6923		  }
6924		else
6925		  {
6926		    tree nt
6927		      = build_qualified_type (TREE_TYPE (tem),
6928					      (TYPE_QUALS (TREE_TYPE (tem))
6929					       | TYPE_QUAL_CONST));
6930		    rtx memloc = assign_temp (nt, 1, 1, 1);
6931
6932		    emit_move_insn (memloc, op0);
6933		    op0 = memloc;
6934		  }
6935	      }
6936
6937	    if (GET_CODE (op0) != MEM)
6938	      abort ();
6939
6940#ifdef POINTERS_EXTEND_UNSIGNED
6941	    if (GET_MODE (offset_rtx) != Pmode)
6942	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
6943#else
6944	    if (GET_MODE (offset_rtx) != ptr_mode)
6945	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6946#endif
6947
6948	    /* A constant address in OP0 can have VOIDmode; we must not try
6949	       to call force_reg in that case, so avoid it.  */
6950	    if (GET_CODE (op0) == MEM
6951		&& GET_MODE (op0) == BLKmode
6952		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6953		&& bitsize != 0
6954		&& (bitpos % bitsize) == 0
6955		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6956		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6957	      {
6958		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6959		bitpos = 0;
6960	      }
6961
6962	    op0 = offset_address (op0, offset_rtx,
6963				  highest_pow2_factor (offset));
6964	  }
6965
6966	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6967	   record its alignment as BIGGEST_ALIGNMENT.  */
6968	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
6969	    && is_aligning_offset (offset, tem))
6970	  set_mem_align (op0, BIGGEST_ALIGNMENT);
6971
6972	/* Don't forget about volatility even if this is a bitfield.  */
6973	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6974	  {
6975	    if (op0 == orig_op0)
6976	      op0 = copy_rtx (op0);
6977
6978	    MEM_VOLATILE_P (op0) = 1;
6979	  }
6980
6981	/* The following code doesn't handle CONCAT.
6982	   Assume only bitpos == 0 can be used for CONCAT, due to
6983	   one-element arrays having the same mode as their element.  */
6984	if (GET_CODE (op0) == CONCAT)
6985	  {
6986	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6987	      abort ();
6988	    return op0;
6989	  }
6990
6991	/* In cases where an aligned union has an unaligned object
6992	   as a field, we might be extracting a BLKmode value from
6993	   an integer-mode (e.g., SImode) object.  Handle this case
6994	   by doing the extract into an object as wide as the field
6995	   (which we know to be the width of a basic mode), then
6996	   storing into memory, and changing the mode to BLKmode.  */
6997	if (mode1 == VOIDmode
6998	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6999	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7000		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7001		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7002		&& modifier != EXPAND_CONST_ADDRESS
7003		&& modifier != EXPAND_INITIALIZER)
7004	    /* If the field isn't aligned enough to fetch as a memref,
7005	       fetch it as a bit field.  */
7006	    || (mode1 != BLKmode
7007		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7008		&& ((TYPE_ALIGN (TREE_TYPE (tem))
7009		     < GET_MODE_ALIGNMENT (mode))
7010		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7011	    /* If the type and the field are a constant size and the
7012	       size of the type isn't the same size as the bitfield,
7013	       we must use bitfield operations.  */
7014	    || (bitsize >= 0
7015		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7016		    == INTEGER_CST)
7017		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7018					  bitsize)))
7019	  {
7020	    enum machine_mode ext_mode = mode;
7021
7022	    if (ext_mode == BLKmode
7023		&& ! (target != 0 && GET_CODE (op0) == MEM
7024		      && GET_CODE (target) == MEM
7025		      && bitpos % BITS_PER_UNIT == 0))
7026	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7027
7028	    if (ext_mode == BLKmode)
7029	      {
7030		/* In this case, BITPOS must start at a byte boundary and
7031		   TARGET, if specified, must be a MEM.  */
7032		if (GET_CODE (op0) != MEM
7033		    || (target != 0 && GET_CODE (target) != MEM)
7034		    || bitpos % BITS_PER_UNIT != 0)
7035		  abort ();
7036
7037		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7038		if (target == 0)
7039		  target = assign_temp (type, 0, 1, 1);
7040
7041		emit_block_move (target, op0,
7042				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7043					  / BITS_PER_UNIT));
7044
7045		return target;
7046	      }
7047
7048	    op0 = validize_mem (op0);
7049
7050	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7051	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7052
7053	    op0 = extract_bit_field (op0, bitsize, bitpos,
7054				     unsignedp, target, ext_mode, ext_mode,
7055				     int_size_in_bytes (TREE_TYPE (tem)));
7056
7057	    /* If the result is a record type and BITSIZE is narrower than
7058	       the mode of OP0, an integral mode, and this is a big endian
7059	       machine, we must put the field into the high-order bits.  */
7060	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7061		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7062		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7063	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7064				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7065					    - bitsize),
7066				  op0, 1);
7067
7068	    if (mode == BLKmode)
7069	      {
7070		rtx new = assign_temp (build_qualified_type
7071				       (type_for_mode (ext_mode, 0),
7072					TYPE_QUAL_CONST), 0, 1, 1);
7073
7074		emit_move_insn (new, op0);
7075		op0 = copy_rtx (new);
7076		PUT_MODE (op0, BLKmode);
7077		set_mem_attributes (op0, exp, 1);
7078	      }
7079
7080	    return op0;
7081	  }
7082
7083	/* If the result is BLKmode, use that to access the object
7084	   now as well.  */
7085	if (mode == BLKmode)
7086	  mode1 = BLKmode;
7087
7088	/* Get a reference to just this component.  */
7089	if (modifier == EXPAND_CONST_ADDRESS
7090	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7091	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7092	else
7093	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7094
7095	if (op0 == orig_op0)
7096	  op0 = copy_rtx (op0);
7097
7098	set_mem_attributes (op0, exp, 0);
7099	if (GET_CODE (XEXP (op0, 0)) == REG)
7100	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7101
7102	MEM_VOLATILE_P (op0) |= volatilep;
7103	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7104	    || modifier == EXPAND_CONST_ADDRESS
7105	    || modifier == EXPAND_INITIALIZER)
7106	  return op0;
7107	else if (target == 0)
7108	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7109
7110	convert_move (target, op0, unsignedp);
7111	return target;
7112      }
7113
7114    case VTABLE_REF:
7115      {
7116	rtx insn, before = get_last_insn (), vtbl_ref;
7117
7118	/* Evaluate the interior expression.  */
7119	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7120				 tmode, modifier);
7121
7122	/* Get or create an instruction off which to hang a note.  */
7123	if (REG_P (subtarget))
7124	  {
7125	    target = subtarget;
7126	    insn = get_last_insn ();
7127	    if (insn == before)
7128	      abort ();
7129	    if (! INSN_P (insn))
7130	      insn = prev_nonnote_insn (insn);
7131	  }
7132	else
7133	  {
7134	    target = gen_reg_rtx (GET_MODE (subtarget));
7135	    insn = emit_move_insn (target, subtarget);
7136	  }
7137
7138	/* Collect the data for the note.  */
7139	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7140	vtbl_ref = plus_constant (vtbl_ref,
7141				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7142	/* Discard the initial CONST that was added.  */
7143	vtbl_ref = XEXP (vtbl_ref, 0);
7144
7145	REG_NOTES (insn)
7146	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7147
7148	return target;
7149      }
7150
7151      /* Intended for a reference to a buffer of a file-object in Pascal.
7152	 But it's not certain that a special tree code will really be
7153	 necessary for these.  INDIRECT_REF might work for them.  */
7154    case BUFFER_REF:
7155      abort ();
7156
7157    case IN_EXPR:
7158      {
7159	/* Pascal set IN expression.
7160
7161	   Algorithm:
7162	       rlo       = set_low - (set_low%bits_per_word);
7163	       the_word  = set [ (index - rlo)/bits_per_word ];
7164	       bit_index = index % bits_per_word;
7165	       bitmask   = 1 << bit_index;
7166	       return !!(the_word & bitmask);  */
7167
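	/* Worked example of the algorithm above, assuming 8 bits per word
	   for illustration: with set_low = 3 and index = 10 we get
	   rlo = 3 - (3 % 8) = 0, the_word = set[(10 - 0) / 8] = set[1],
	   bit_index = 10 % 8 = 2 and bitmask = 1 << 2 = 4, so the result
	   is nonzero iff bit 2 of set[1] is set.  */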
7168	tree set = TREE_OPERAND (exp, 0);
7169	tree index = TREE_OPERAND (exp, 1);
7170	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7171	tree set_type = TREE_TYPE (set);
7172	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7173	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7174	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7175	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7176	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7177	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7178	rtx setaddr = XEXP (setval, 0);
7179	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7180	rtx rlow;
7181	rtx diff, quo, rem, addr, bit, result;
7182
7183	/* If domain is empty, answer is no.  Likewise if index is constant
7184	   and out of bounds.  */
7185	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7186	     && TREE_CODE (set_low_bound) == INTEGER_CST
7187	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7188	     || (TREE_CODE (index) == INTEGER_CST
7189		 && TREE_CODE (set_low_bound) == INTEGER_CST
7190		 && tree_int_cst_lt (index, set_low_bound))
7191	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7192		 && TREE_CODE (index) == INTEGER_CST
7193		 && tree_int_cst_lt (set_high_bound, index))))
7194	  return const0_rtx;
7195
7196	if (target == 0)
7197	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7198
7199	/* If we get here, we have to generate the code for both cases
7200	   (in range and out of range).  */
7201
7202	op0 = gen_label_rtx ();
7203	op1 = gen_label_rtx ();
7204
7205	if (! (GET_CODE (index_val) == CONST_INT
7206	       && GET_CODE (lo_r) == CONST_INT))
7207	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7208				   GET_MODE (index_val), iunsignedp, op1);
7209
7210	if (! (GET_CODE (index_val) == CONST_INT
7211	       && GET_CODE (hi_r) == CONST_INT))
7212	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7213				   GET_MODE (index_val), iunsignedp, op1);
7214
7215	/* Calculate the element number of bit zero in the first word
7216	   of the set.  */
7217	if (GET_CODE (lo_r) == CONST_INT)
7218	  rlow = GEN_INT (INTVAL (lo_r)
7219			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7220	else
7221	  rlow = expand_binop (index_mode, and_optab, lo_r,
7222			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7223			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7224
7225	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7226			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7227
7228	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7229			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7230	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7231			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7232
7233	addr = memory_address (byte_mode,
7234			       expand_binop (index_mode, add_optab, diff,
7235					     setaddr, NULL_RTX, iunsignedp,
7236					     OPTAB_LIB_WIDEN));
7237
7238	/* Extract the bit we want to examine.  */
7239	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7240			    gen_rtx_MEM (byte_mode, addr),
7241			    make_tree (TREE_TYPE (index), rem),
7242			    NULL_RTX, 1);
7243	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7244			       GET_MODE (target) == byte_mode ? target : 0,
7245			       1, OPTAB_LIB_WIDEN);
7246
7247	if (result != target)
7248	  convert_move (target, result, 1);
7249
7250	/* Output the code to handle the out-of-range case.  */
7251	emit_jump (op0);
7252	emit_label (op1);
7253	emit_move_insn (target, const0_rtx);
7254	emit_label (op0);
7255	return target;
7256      }
7257
7258    case WITH_CLEANUP_EXPR:
7259      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7260	{
7261	  WITH_CLEANUP_EXPR_RTL (exp)
7262	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7263	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7264
7265	  /* That's it for this cleanup.  */
7266	  TREE_OPERAND (exp, 1) = 0;
7267	}
7268      return WITH_CLEANUP_EXPR_RTL (exp);
7269
7270    case CLEANUP_POINT_EXPR:
7271      {
7272	/* Start a new binding layer that will keep track of all cleanup
7273	   actions to be performed.  */
7274	expand_start_bindings (2);
7275
7276	target_temp_slot_level = temp_slot_level;
7277
7278	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7279	/* If we're going to use this value, load it up now.  */
7280	if (! ignore)
7281	  op0 = force_not_mem (op0);
7282	preserve_temp_slots (op0);
7283	expand_end_bindings (NULL_TREE, 0, 0);
7284      }
7285      return op0;
7286
7287    case CALL_EXPR:
7288      /* Check for a built-in function.  */
7289      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7290	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7291	      == FUNCTION_DECL)
7292	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7293        {
7294	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7295	      == BUILT_IN_FRONTEND)
7296	    return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7297	  else
7298	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7299	}
7300
7301      return expand_call (exp, target, ignore);
7302
7303    case NON_LVALUE_EXPR:
7304    case NOP_EXPR:
7305    case CONVERT_EXPR:
7306    case REFERENCE_EXPR:
7307      if (TREE_OPERAND (exp, 0) == error_mark_node)
7308	return const0_rtx;
7309
7310      if (TREE_CODE (type) == UNION_TYPE)
7311	{
7312	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7313
7314	  /* If both input and output are BLKmode, this conversion isn't doing
7315	     anything except possibly changing the memory attributes.  */
7316	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7317	    {
7318	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7319					modifier);
7320
7321	      result = copy_rtx (result);
7322	      set_mem_attributes (result, exp, 0);
7323	      return result;
7324	    }
7325
7326	  if (target == 0)
7327	    target = assign_temp (type, 0, 1, 1);
7328
7329	  if (GET_CODE (target) == MEM)
7330	    /* Store data into beginning of memory target.  */
7331	    store_expr (TREE_OPERAND (exp, 0),
7332			adjust_address (target, TYPE_MODE (valtype), 0), 0);
7333
7334	  else if (GET_CODE (target) == REG)
7335	    /* Store this field into a union of the proper type.  */
7336	    store_field (target,
7337			 MIN ((int_size_in_bytes (TREE_TYPE
7338						  (TREE_OPERAND (exp, 0)))
7339			       * BITS_PER_UNIT),
7340			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7341			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7342			 VOIDmode, 0, type, 0);
7343	  else
7344	    abort ();
7345
7346	  /* Return the entire union.  */
7347	  return target;
7348	}
7349
7350      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7351	{
7352	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7353			     modifier);
7354
7355	  /* If the signedness of the conversion differs and OP0 is
7356	     a promoted SUBREG, clear that indication since we now
7357	     have to do the proper extension.  */
7358	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7359	      && GET_CODE (op0) == SUBREG)
7360	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7361
7362	  return op0;
7363	}
7364
7365      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7366      if (GET_MODE (op0) == mode)
7367	return op0;
7368
7369      /* If OP0 is a constant, just convert it into the proper mode.  */
7370      if (CONSTANT_P (op0))
7371	{
7372	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7373	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7374
7375          if (modifier == EXPAND_INITIALIZER)
7376	    return simplify_gen_subreg (mode, op0, inner_mode,
7377					subreg_lowpart_offset (mode,
7378							       inner_mode));
7379	  else
7380	    return convert_modes (mode, inner_mode, op0,
7381				  TREE_UNSIGNED (inner_type));
7382	}
7383
7384      if (modifier == EXPAND_INITIALIZER)
7385	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7386
7387      if (target == 0)
7388	return
7389	  convert_to_mode (mode, op0,
7390			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7391      else
7392	convert_move (target, op0,
7393		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7394      return target;
7395
7396    case VIEW_CONVERT_EXPR:
7397      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7398
7399      /* If the input and output modes are both the same, we are done.
7400	 Otherwise, if neither mode is BLKmode and both are within a word, we
7401	 can use gen_lowpart.  If neither is true, make sure the operand is
7402	 in memory and convert the MEM to the new mode.  */
7403      if (TYPE_MODE (type) == GET_MODE (op0))
7404	;
7405      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7406	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7407	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7408	op0 = gen_lowpart (TYPE_MODE (type), op0);
7409      else if (GET_CODE (op0) != MEM)
7410	{
7411	  /* If the operand is not a MEM, force it into memory.  Since we
7412	     are going to be changing the mode of the MEM, don't call
7413	     force_const_mem for constants because we don't allow pool
7414	     constants to change mode.  */
7415	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7416
7417	  if (TREE_ADDRESSABLE (exp))
7418	    abort ();
7419
7420	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7421	    target
7422	      = assign_stack_temp_for_type
7423		(TYPE_MODE (inner_type),
7424		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7425
7426	  emit_move_insn (target, op0);
7427	  op0 = target;
7428	}
7429
7430      /* At this point, OP0 is in the correct mode.  If the output type is such
7431	 that the operand is known to be aligned, indicate that it is.
7432	 Otherwise, we need only be concerned about alignment for non-BLKmode
7433	 results.  */
7434      if (GET_CODE (op0) == MEM)
7435	{
7436	  op0 = copy_rtx (op0);
7437
7438	  if (TYPE_ALIGN_OK (type))
7439	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7440	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7441		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7442	    {
7443	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7444	      HOST_WIDE_INT temp_size
7445		= MAX (int_size_in_bytes (inner_type),
7446		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7447	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7448						    temp_size, 0, type);
7449	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7450
7451	      if (TREE_ADDRESSABLE (exp))
7452		abort ();
7453
7454	      if (GET_MODE (op0) == BLKmode)
7455		emit_block_move (new_with_op0_mode, op0,
7456				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7457	      else
7458		emit_move_insn (new_with_op0_mode, op0);
7459
7460	      op0 = new;
7461	    }
7462
7463	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7464	}
7465
7466      return op0;
7467
7468    case PLUS_EXPR:
7469      /* We come here from MINUS_EXPR when the second operand is a
7470         constant.  */
7471    plus_expr:
7472      this_optab = ! unsignedp && flag_trapv
7473                   && (GET_MODE_CLASS (mode) == MODE_INT)
7474                   ? addv_optab : add_optab;
7475
7476      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7477	 something else, make sure we add the register to the constant and
7478	 then to the other thing.  This case can occur during strength
7479	 reduction and doing it this way will produce better code if the
7480	 frame pointer or argument pointer is eliminated.
7481
7482	 fold-const.c will ensure that the constant is always in the inner
7483	 PLUS_EXPR, so the only case we need to do anything about is if
7484	 sp, ap, or fp is our second argument, in which case we must swap
7485	 the innermost first argument and our second argument.  */
7486
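      /* For example (a sketch of the transformation below): given the tree
	 (PLUS (PLUS X C) FP), where FP is an RTL_EXPR for the frame pointer,
	 the operands are swapped so that we effectively expand
	 (PLUS (PLUS FP C) X), which lets FP + C be simplified when the
	 frame pointer is eliminated.  */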
7487      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7488	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7489	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7490	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7491	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7492	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7493	{
7494	  tree t = TREE_OPERAND (exp, 1);
7495
7496	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7497	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7498	}
7499
7500      /* If the result is to be ptr_mode and we are adding an integer to
7501	 something, we might be forming a constant.  So try to use
7502	 plus_constant.  If it produces a sum and we can't accept it,
7503	 use force_operand.  This allows P = &ARR[const] to generate
7504	 efficient code on machines where a SYMBOL_REF is not a valid
7505	 address.
7506
7507	 If this is an EXPAND_SUM call, always return the sum.  */
7508      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7509          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7510	{
7511	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7512	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7513	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7514	    {
7515	      rtx constant_part;
7516
7517	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7518				 EXPAND_SUM);
7519	      /* Use immed_double_const to ensure that the constant is
7520		 truncated according to the mode of OP1, then sign extended
7521		 to a HOST_WIDE_INT.  Using the constant directly can result
7522		 in non-canonical RTL in a 64x32 cross compile.  */
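	      /* For instance (illustration only), on a 64-bit host compiling
		 for a 32-bit target the SImode constant 0xffffffff must end
		 up as (const_int -1), not (const_int 0xffffffff).  */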
7523	      constant_part
7524		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7525				      (HOST_WIDE_INT) 0,
7526				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7527	      op1 = plus_constant (op1, INTVAL (constant_part));
7528	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7529		op1 = force_operand (op1, target);
7530	      return op1;
7531	    }
7532
7533	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7534		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7535		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7536	    {
7537	      rtx constant_part;
7538
7539	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7540				 (modifier == EXPAND_INITIALIZER
7541				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7542	      if (! CONSTANT_P (op0))
7543		{
7544		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7545				     VOIDmode, modifier);
7546		  /* Don't go to both_summands if modifier
7547		     says it's not right to return a PLUS.  */
7548		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7549		    goto binop2;
7550		  goto both_summands;
7551		}
7552	      /* Use immed_double_const to ensure that the constant is
7553		 truncated according to the mode of OP1, then sign extended
7554		 to a HOST_WIDE_INT.  Using the constant directly can result
7555		 in non-canonical RTL in a 64x32 cross compile.  */
7556	      constant_part
7557		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7558				      (HOST_WIDE_INT) 0,
7559				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7560	      op0 = plus_constant (op0, INTVAL (constant_part));
7561	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7562		op0 = force_operand (op0, target);
7563	      return op0;
7564	    }
7565	}
7566
7567      /* No sense saving up arithmetic to be done
7568	 if it's all in the wrong mode to form part of an address.
7569	 And force_operand won't know whether to sign-extend or
7570	 zero-extend.  */
7571      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7572	  || mode != ptr_mode)
7573	goto binop;
7574
7575      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7576	subtarget = 0;
7577
7578      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7579      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7580
7581    both_summands:
7582      /* Make sure any term that's a sum with a constant comes last.  */
7583      if (GET_CODE (op0) == PLUS
7584	  && CONSTANT_P (XEXP (op0, 1)))
7585	{
7586	  temp = op0;
7587	  op0 = op1;
7588	  op1 = temp;
7589	}
7590      /* If adding to a sum including a constant,
7591	 associate it to put the constant outside.  */
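      /* Sketch of the net effect: expanding REG2 + (REG1 + 4), the swap
	 above leaves the constant-containing sum in OP1; the code below
	 forms (plus REG1 REG2) and re-attaches the constant, so the final
	 result is (plus (plus REG1 REG2) (const_int 4)).  */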
7592      if (GET_CODE (op1) == PLUS
7593	  && CONSTANT_P (XEXP (op1, 1)))
7594	{
7595	  rtx constant_term = const0_rtx;
7596
7597	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7598	  if (temp != 0)
7599	    op0 = temp;
7600	  /* Ensure that MULT comes first if there is one.  */
7601	  else if (GET_CODE (op0) == MULT)
7602	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7603	  else
7604	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7605
7606	  /* Let's also eliminate constants from op0 if possible.  */
7607	  op0 = eliminate_constant_term (op0, &constant_term);
7608
7609	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7610	     their sum should be a constant.  Form it into OP1, since the
7611	     result we want will then be OP0 + OP1.  */
7612
7613	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7614					    XEXP (op1, 1));
7615	  if (temp != 0)
7616	    op1 = temp;
7617	  else
7618	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7619	}
7620
7621      /* Put a constant term last and put a multiplication first.  */
7622      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7623	temp = op1, op1 = op0, op0 = temp;
7624
7625      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7626      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7627
7628    case MINUS_EXPR:
7629      /* For initializers, we are allowed to return a MINUS of two
7630	 symbolic constants.  Here we handle all cases when both operands
7631	 are constant.  */
7632      /* Handle difference of two symbolic constants,
7633	 for the sake of an initializer.  */
7634      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7635	  && really_constant_p (TREE_OPERAND (exp, 0))
7636	  && really_constant_p (TREE_OPERAND (exp, 1)))
7637	{
7638	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7639				 modifier);
7640	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7641				 modifier);
7642
7643	  /* If the last operand is a CONST_INT, use plus_constant of
7644	     the negated constant.  Else make the MINUS.  */
7645	  if (GET_CODE (op1) == CONST_INT)
7646	    return plus_constant (op0, - INTVAL (op1));
7647	  else
7648	    return gen_rtx_MINUS (mode, op0, op1);
7649	}
7650      /* Convert A - const to A + (-const).  */
7651      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7652	{
7653	  tree negated = fold (build1 (NEGATE_EXPR, type,
7654				       TREE_OPERAND (exp, 1)));
7655
7656	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7657	    /* If we can't negate the constant in TYPE, leave it alone and
7658	       expand_binop will negate it for us.  We used to try to do it
7659	       here in the signed version of TYPE, but that doesn't work
7660	       on POINTER_TYPEs.  */;
7661	  else
7662	    {
7663	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7664	      goto plus_expr;
7665	    }
7666	}
7667      this_optab = ! unsignedp && flag_trapv
7668                   && (GET_MODE_CLASS(mode) == MODE_INT)
7669                   ? subv_optab : sub_optab;
7670      goto binop;
7671
7672    case MULT_EXPR:
7673      /* If first operand is constant, swap them.
7674	 Thus the following special case checks need only
7675	 check the second operand.  */
7676      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7677	{
7678	  tree t1 = TREE_OPERAND (exp, 0);
7679	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7680	  TREE_OPERAND (exp, 1) = t1;
7681	}
7682
7683      /* Attempt to return something suitable for generating an
7684	 indexed address, for machines that support that.  */
7685
7686      if (modifier == EXPAND_SUM && mode == ptr_mode
7687	  && host_integerp (TREE_OPERAND (exp, 1), 0))
7688	{
7689	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7690			     EXPAND_SUM);
7691
7692	  /* If we knew for certain that this is arithmetic for an array
7693	     reference, and we knew the bounds of the array, then we could
7694	     apply the distributive law across (PLUS X C) for constant C.
7695	     Without such knowledge, we risk overflowing the computation
7696	     when both X and C are large, but X+C isn't.  */
7697	  /* ??? Could perhaps special-case EXP being unsigned and C being
7698	     positive.  In that case we are certain that X+C is no smaller
7699	     than X and so the transformed expression will overflow iff the
7700	     original would have.  */
7701
7702	  if (GET_CODE (op0) != REG)
7703	    op0 = force_operand (op0, NULL_RTX);
7704	  if (GET_CODE (op0) != REG)
7705	    op0 = copy_to_mode_reg (mode, op0);
7706
7707	  return
7708	    gen_rtx_MULT (mode, op0,
7709			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7710	}
7711
7712      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7713	subtarget = 0;
7714
7715      /* Check for multiplying things that have been extended
7716	 from a narrower type.  If this machine supports multiplying
7717	 in that narrower type with a result in the desired type,
7718	 do it that way, and avoid the explicit type-conversion.  */
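      /* For instance, (int) (short) a * (int) (short) b can often be done as
	 a single widening HImode x HImode -> SImode multiply on targets that
	 provide one (via smul_widen_optab / umul_widen_optab below), instead
	 of first extending both operands to SImode.  This is only a sketch of
	 the intent; the precise conditions are checked below.  */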
7719      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7720	  && TREE_CODE (type) == INTEGER_TYPE
7721	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7722	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7723	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7724	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7725				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7726	       /* Don't use a widening multiply if a shift will do.  */
7727	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7728		    > HOST_BITS_PER_WIDE_INT)
7729		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7730	      ||
7731	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7732	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7733		   ==
7734		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7735	       /* If both operands are extended, they must either both
7736		  be zero-extended or both be sign-extended.  */
7737	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7738		   ==
7739		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7740	{
7741	  enum machine_mode innermode
7742	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7743	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7744			? smul_widen_optab : umul_widen_optab);
7745	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7746			? umul_widen_optab : smul_widen_optab);
7747	  if (mode == GET_MODE_WIDER_MODE (innermode))
7748	    {
7749	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7750		{
7751		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7752				     NULL_RTX, VOIDmode, 0);
7753		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7754		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7755				       VOIDmode, 0);
7756		  else
7757		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7758				       NULL_RTX, VOIDmode, 0);
7759		  goto binop2;
7760		}
7761	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7762		       && innermode == word_mode)
7763		{
7764		  rtx htem;
7765		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7766				     NULL_RTX, VOIDmode, 0);
7767		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7768		    op1 = convert_modes (innermode, mode,
7769					 expand_expr (TREE_OPERAND (exp, 1),
7770						      NULL_RTX, VOIDmode, 0),
7771					 unsignedp);
7772		  else
7773		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7774				       NULL_RTX, VOIDmode, 0);
7775		  temp = expand_binop (mode, other_optab, op0, op1, target,
7776				       unsignedp, OPTAB_LIB_WIDEN);
7777		  htem = expand_mult_highpart_adjust (innermode,
7778						      gen_highpart (innermode, temp),
7779						      op0, op1,
7780						      gen_highpart (innermode, temp),
7781						      unsignedp);
7782		  emit_move_insn (gen_highpart (innermode, temp), htem);
7783		  return temp;
7784		}
7785	    }
7786	}
7787      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7788      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7789      return expand_mult (mode, op0, op1, target, unsignedp);
7790
7791    case TRUNC_DIV_EXPR:
7792    case FLOOR_DIV_EXPR:
7793    case CEIL_DIV_EXPR:
7794    case ROUND_DIV_EXPR:
7795    case EXACT_DIV_EXPR:
7796      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7797	subtarget = 0;
7798      /* Possible optimization: compute the dividend with EXPAND_SUM
7799	 then, if the divisor is constant, optimize the case
7800	 where some terms of the dividend have coefficients divisible by it.  */
7801      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7802      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7803      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7804
7805    case RDIV_EXPR:
7806      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7807         saving an expensive divide.  If not, combine will rebuild the original
7808         computation.  */
7809      if (flag_unsafe_math_optimizations && optimize && !optimize_size
7810	  && TREE_CODE (type) == REAL_TYPE
7811	  && !real_onep (TREE_OPERAND (exp, 0)))
7812        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7813				   build (RDIV_EXPR, type,
7814					  build_real (type, dconst1),
7815					  TREE_OPERAND (exp, 1))),
7816			    target, tmode, unsignedp);
7817      this_optab = sdiv_optab;
7818      goto binop;
7819
7820    case TRUNC_MOD_EXPR:
7821    case FLOOR_MOD_EXPR:
7822    case CEIL_MOD_EXPR:
7823    case ROUND_MOD_EXPR:
7824      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7825	subtarget = 0;
7826      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7827      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7828      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7829
7830    case FIX_ROUND_EXPR:
7831    case FIX_FLOOR_EXPR:
7832    case FIX_CEIL_EXPR:
7833      abort ();			/* Not used for C.  */
7834
7835    case FIX_TRUNC_EXPR:
7836      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7837      if (target == 0)
7838	target = gen_reg_rtx (mode);
7839      expand_fix (target, op0, unsignedp);
7840      return target;
7841
7842    case FLOAT_EXPR:
7843      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7844      if (target == 0)
7845	target = gen_reg_rtx (mode);
7846      /* expand_float can't figure out what to do if FROM has VOIDmode.
7847	 So give it the correct mode.  With -O, cse will optimize this.  */
7848      if (GET_MODE (op0) == VOIDmode)
7849	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7850				op0);
7851      expand_float (target, op0,
7852		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7853      return target;
7854
7855    case NEGATE_EXPR:
7856      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
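      /* With -ftrapv (flag_trapv), use the trapping negate optab for signed
	 integer modes so that overflow (negating the most negative value)
	 traps instead of silently wrapping.  */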
7857      temp = expand_unop (mode,
7858                          ! unsignedp && flag_trapv
7859                          && (GET_MODE_CLASS(mode) == MODE_INT)
7860                          ? negv_optab : neg_optab, op0, target, 0);
7861      if (temp == 0)
7862	abort ();
7863      return temp;
7864
7865    case ABS_EXPR:
7866      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7867
7868      /* Handle complex values specially.  */
7869      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7870	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7871	return expand_complex_abs (mode, op0, target, unsignedp);
7872
7873      /* Unsigned abs is simply the operand.  Testing here means we don't
7874	 risk generating incorrect code below.  */
7875      if (TREE_UNSIGNED (type))
7876	return op0;
7877
7878      return expand_abs (mode, op0, target, unsignedp,
7879			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7880
7881    case MAX_EXPR:
7882    case MIN_EXPR:
7883      target = original_target;
7884      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7885	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7886	  || GET_MODE (target) != mode
7887	  || (GET_CODE (target) == REG
7888	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7889	target = gen_reg_rtx (mode);
7890      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7891      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7892
7893      /* First try to do it with a special MIN or MAX instruction.
7894	 If that does not win, use a conditional jump to select the proper
7895	 value.  */
7896      this_optab = (TREE_UNSIGNED (type)
7897		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7898		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7899
7900      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7901			   OPTAB_WIDEN);
7902      if (temp != 0)
7903	return temp;
7904
7905      /* At this point, a MEM target is no longer useful; we will get better
7906	 code without it.  */
7907
7908      if (GET_CODE (target) == MEM)
7909	target = gen_reg_rtx (mode);
7910
7911      if (target != op0)
7912	emit_move_insn (target, op0);
7913
7914      op0 = gen_label_rtx ();
7915
7916      /* If this mode is an integer too wide to compare properly,
7917	 compare word by word.  Rely on cse to optimize constant cases.  */
7918      if (GET_MODE_CLASS (mode) == MODE_INT
7919	  && ! can_compare_p (GE, mode, ccp_jump))
7920	{
7921	  if (code == MAX_EXPR)
7922	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7923					  target, op1, NULL_RTX, op0);
7924	  else
7925	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7926					  op1, target, NULL_RTX, op0);
7927	}
7928      else
7929	{
7930	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7931	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7932				   unsignedp, mode, NULL_RTX, NULL_RTX,
7933				   op0);
7934	}
7935      emit_move_insn (target, op1);
7936      emit_label (op0);
7937      return target;
7938
7939    case BIT_NOT_EXPR:
7940      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7941      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7942      if (temp == 0)
7943	abort ();
7944      return temp;
7945
7946    case FFS_EXPR:
7947      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7948      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7949      if (temp == 0)
7950	abort ();
7951      return temp;
7952
7953      /* ??? Can optimize bitwise operations with one arg constant.
7954	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7955	 and (a bitwise1 b) bitwise2 b (etc)
7956	 but that is probably not worth while.  */
7957
7958      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7959	 boolean values when we want in all cases to compute both of them.  In
7960	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7961	 as actual zero-or-1 values and then bitwise anding.  In cases where
7962	 there cannot be any side effects, better code would be made by
7963	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7964	 how to recognize those cases.  */
7965
7966    case TRUTH_AND_EXPR:
7967    case BIT_AND_EXPR:
7968      this_optab = and_optab;
7969      goto binop;
7970
7971    case TRUTH_OR_EXPR:
7972    case BIT_IOR_EXPR:
7973      this_optab = ior_optab;
7974      goto binop;
7975
7976    case TRUTH_XOR_EXPR:
7977    case BIT_XOR_EXPR:
7978      this_optab = xor_optab;
7979      goto binop;
7980
7981    case LSHIFT_EXPR:
7982    case RSHIFT_EXPR:
7983    case LROTATE_EXPR:
7984    case RROTATE_EXPR:
7985      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7986	subtarget = 0;
7987      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7988      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7989			   unsignedp);
7990
7991      /* Could determine the answer when only additive constants differ.  Also,
7992	 the addition of one can be handled by changing the condition.  */
7993    case LT_EXPR:
7994    case LE_EXPR:
7995    case GT_EXPR:
7996    case GE_EXPR:
7997    case EQ_EXPR:
7998    case NE_EXPR:
7999    case UNORDERED_EXPR:
8000    case ORDERED_EXPR:
8001    case UNLT_EXPR:
8002    case UNLE_EXPR:
8003    case UNGT_EXPR:
8004    case UNGE_EXPR:
8005    case UNEQ_EXPR:
8006      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8007      if (temp != 0)
8008	return temp;
8009
8010      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8011      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8012	  && original_target
8013	  && GET_CODE (original_target) == REG
8014	  && (GET_MODE (original_target)
8015	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8016	{
8017	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8018			      VOIDmode, 0);
8019
8020	  /* If temp is constant, we can just compute the result.  */
8021	  if (GET_CODE (temp) == CONST_INT)
8022	    {
8023	      if (INTVAL (temp) != 0)
8024	        emit_move_insn (target, const1_rtx);
8025	      else
8026	        emit_move_insn (target, const0_rtx);
8027
8028	      return target;
8029	    }
8030
8031	  if (temp != original_target)
8032	    {
8033	      enum machine_mode mode1 = GET_MODE (temp);
8034	      if (mode1 == VOIDmode)
8035		mode1 = tmode != VOIDmode ? tmode : mode;
8036
8037	      temp = copy_to_mode_reg (mode1, temp);
8038	    }
8039
8040	  op1 = gen_label_rtx ();
8041	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8042				   GET_MODE (temp), unsignedp, op1);
8043	  emit_move_insn (temp, const1_rtx);
8044	  emit_label (op1);
8045	  return temp;
8046	}
8047
8048      /* If no set-flag instruction, must generate a conditional
8049	 store into a temporary variable.  Drop through
8050	 and handle this like && and ||.  */
8051
8052    case TRUTH_ANDIF_EXPR:
8053    case TRUTH_ORIF_EXPR:
8054      if (! ignore
8055	  && (target == 0 || ! safe_from_p (target, exp, 1)
8056	      /* Make sure we don't have a hard reg (such as the function's return
8057		 value) live across basic blocks, if not optimizing.  */
8058	      || (!optimize && GET_CODE (target) == REG
8059		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8060	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8061
8062      if (target)
8063	emit_clr_insn (target);
8064
8065      op1 = gen_label_rtx ();
8066      jumpifnot (exp, op1);
8067
8068      if (target)
8069	emit_0_to_1_insn (target);
8070
8071      emit_label (op1);
8072      return ignore ? const0_rtx : target;
8073
8074    case TRUTH_NOT_EXPR:
8075      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8076      /* The parser is careful to generate TRUTH_NOT_EXPR
8077	 only with operands that are always zero or one.  */
8078      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8079			   target, 1, OPTAB_LIB_WIDEN);
8080      if (temp == 0)
8081	abort ();
8082      return temp;
8083
8084    case COMPOUND_EXPR:
8085      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8086      emit_queue ();
8087      return expand_expr (TREE_OPERAND (exp, 1),
8088			  (ignore ? const0_rtx : target),
8089			  VOIDmode, 0);
8090
8091    case COND_EXPR:
8092      /* If we would have a "singleton" (see below) were it not for a
8093	 conversion in each arm, bring that conversion back out.  */
8094      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8095	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8096	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8097	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8098	{
8099	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8100	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8101
8102	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8103	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8104	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8105		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8106	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8107		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8108	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8109		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8110	    return expand_expr (build1 (NOP_EXPR, type,
8111					build (COND_EXPR, TREE_TYPE (iftrue),
8112					       TREE_OPERAND (exp, 0),
8113					       iftrue, iffalse)),
8114				target, tmode, modifier);
8115	}
8116
8117      {
8118	/* Note that COND_EXPRs whose type is a structure or union
8119	   are required to be constructed to contain assignments of
8120	   a temporary variable, so that we can evaluate them here
8121	   for side effect only.  If type is void, we must do likewise.  */
8122
8123	/* If an arm of the branch requires a cleanup,
8124	   only that cleanup is performed.  */
8125
8126	tree singleton = 0;
8127	tree binary_op = 0, unary_op = 0;
8128
8129	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8130	   convert it to our mode, if necessary.  */
8131	if (integer_onep (TREE_OPERAND (exp, 1))
8132	    && integer_zerop (TREE_OPERAND (exp, 2))
8133	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8134	  {
8135	    if (ignore)
8136	      {
8137		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8138			     modifier);
8139		return const0_rtx;
8140	      }
8141
8142	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8143	    if (GET_MODE (op0) == mode)
8144	      return op0;
8145
8146	    if (target == 0)
8147	      target = gen_reg_rtx (mode);
8148	    convert_move (target, op0, unsignedp);
8149	    return target;
8150	  }
8151
8152	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8153	   output and conditionally add B.  Similarly for unary operations.
8154	   Don't do this if X has side-effects because those side effects
8155	   might affect A or B and the "?" operation is a sequence point in
8156	   ANSI.  (operand_equal_p tests for side effects.)  */
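
	/* Illustrative example (not in the original source): for
	   `x ? a + b : a', A is the "singleton" arm (a) and the binary op
	   is a + b; the code below copies A into the result, jumps around
	   the addition when X is false, and otherwise adds B in place.  */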
8157
8158	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8159	    && operand_equal_p (TREE_OPERAND (exp, 2),
8160				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8161	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8162	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8163		 && operand_equal_p (TREE_OPERAND (exp, 1),
8164				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8165	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8166	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8167		 && operand_equal_p (TREE_OPERAND (exp, 2),
8168				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8169	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8170	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8171		 && operand_equal_p (TREE_OPERAND (exp, 1),
8172				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8173	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8174
8175	/* If we are not to produce a result, we have no target.  Otherwise,
8176	   if a target was specified use it; it will not be used as an
8177	   intermediate target unless it is safe.  If no target, use a
8178	   temporary.  */
8179
8180	if (ignore)
8181	  temp = 0;
8182	else if (original_target
8183		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8184		     || (singleton && GET_CODE (original_target) == REG
8185			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8186			 && original_target == var_rtx (singleton)))
8187		 && GET_MODE (original_target) == mode
8188#ifdef HAVE_conditional_move
8189		 && (! can_conditionally_move_p (mode)
8190		     || GET_CODE (original_target) == REG
8191		     || TREE_ADDRESSABLE (type))
8192#endif
8193		 && (GET_CODE (original_target) != MEM
8194		     || TREE_ADDRESSABLE (type)))
8195	  temp = original_target;
8196	else if (TREE_ADDRESSABLE (type))
8197	  abort ();
8198	else
8199	  temp = assign_temp (type, 0, 0, 1);
8200
8201	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8202	   do the test of X as a store-flag operation, do this as
8203	   A + ((X != 0) << log C).  Similarly for other simple binary
8204	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
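	/* Concrete illustration (not in the original source): with
	   BRANCH_COST >= 3, `X ? A + 4 : A' can become A + ((X != 0) << 2),
	   and `X ? A : A + 1' can become A + (X == 0), replacing the branch
	   with a store-flag sequence.  */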
8205	if (temp && singleton && binary_op
8206	    && (TREE_CODE (binary_op) == PLUS_EXPR
8207		|| TREE_CODE (binary_op) == MINUS_EXPR
8208		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8209		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8210	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8211		: integer_onep (TREE_OPERAND (binary_op, 1)))
8212	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8213	  {
8214	    rtx result;
8215	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8216                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8217                               ? addv_optab : add_optab)
8218                            : TREE_CODE (binary_op) == MINUS_EXPR
8219                              ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8220                                 ? subv_optab : sub_optab)
8221                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8222                            : xor_optab);
8223
8224	    /* If we had X ? A : A + 1, do this as A + (X == 0).
8225
8226	       We have to invert the truth value here and then put it
8227	       back later if do_store_flag fails.  We cannot simply copy
8228	       TREE_OPERAND (exp, 0) to another variable and modify that
8229	       because invert_truthvalue can modify the tree pointed to
8230	       by its argument.  */
8231	    if (singleton == TREE_OPERAND (exp, 1))
8232	      TREE_OPERAND (exp, 0)
8233		= invert_truthvalue (TREE_OPERAND (exp, 0));
8234
8235	    result = do_store_flag (TREE_OPERAND (exp, 0),
8236				    (safe_from_p (temp, singleton, 1)
8237				     ? temp : NULL_RTX),
8238				    mode, BRANCH_COST <= 1);
8239
8240	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8241	      result = expand_shift (LSHIFT_EXPR, mode, result,
8242				     build_int_2 (tree_log2
8243						  (TREE_OPERAND
8244						   (binary_op, 1)),
8245						  0),
8246				     (safe_from_p (temp, singleton, 1)
8247				      ? temp : NULL_RTX), 0);
8248
8249	    if (result)
8250	      {
8251		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8252		return expand_binop (mode, boptab, op1, result, temp,
8253				     unsignedp, OPTAB_LIB_WIDEN);
8254	      }
8255	    else if (singleton == TREE_OPERAND (exp, 1))
8256	      TREE_OPERAND (exp, 0)
8257		= invert_truthvalue (TREE_OPERAND (exp, 0));
8258	  }
8259
8260	do_pending_stack_adjust ();
8261	NO_DEFER_POP;
8262	op0 = gen_label_rtx ();
8263
8264	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8265	  {
8266	    if (temp != 0)
8267	      {
8268		/* If the target conflicts with the other operand of the
8269		   binary op, we can't use it.  Also, we can't use the target
8270		   if it is a hard register, because evaluating the condition
8271		   might clobber it.  */
8272		if ((binary_op
8273		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8274		    || (GET_CODE (temp) == REG
8275			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8276		  temp = gen_reg_rtx (mode);
8277		store_expr (singleton, temp, 0);
8278	      }
8279	    else
8280	      expand_expr (singleton,
8281			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8282	    if (singleton == TREE_OPERAND (exp, 1))
8283	      jumpif (TREE_OPERAND (exp, 0), op0);
8284	    else
8285	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8286
8287	    start_cleanup_deferral ();
8288	    if (binary_op && temp == 0)
8289	      /* Just touch the other operand.  */
8290	      expand_expr (TREE_OPERAND (binary_op, 1),
8291			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8292	    else if (binary_op)
8293	      store_expr (build (TREE_CODE (binary_op), type,
8294				 make_tree (type, temp),
8295				 TREE_OPERAND (binary_op, 1)),
8296			  temp, 0);
8297	    else
8298	      store_expr (build1 (TREE_CODE (unary_op), type,
8299				  make_tree (type, temp)),
8300			  temp, 0);
8301	    op1 = op0;
8302	  }
8303	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8304	   comparison operator.  If we have one of these cases, set the
8305	   output to A, branch on A (cse will merge these two references),
8306	   then set the output to FOO.  */
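	/* Illustrative example (not in the original source): for
	   `a > 0 ? a : b' the code stores A into the result, jumps past the
	   else-arm when a > 0 holds, and otherwise stores B; cse is expected
	   to merge the two references to A.  */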
8307	else if (temp
8308		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8309		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8310		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8311				     TREE_OPERAND (exp, 1), 0)
8312		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8313		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8314		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8315	  {
8316	    if (GET_CODE (temp) == REG
8317		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8318	      temp = gen_reg_rtx (mode);
8319	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8320	    jumpif (TREE_OPERAND (exp, 0), op0);
8321
8322	    start_cleanup_deferral ();
8323	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8324	    op1 = op0;
8325	  }
8326	else if (temp
8327		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8328		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8329		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8330				     TREE_OPERAND (exp, 2), 0)
8331		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8332		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8333		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8334	  {
8335	    if (GET_CODE (temp) == REG
8336		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8337	      temp = gen_reg_rtx (mode);
8338	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8339	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8340
8341	    start_cleanup_deferral ();
8342	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8343	    op1 = op0;
8344	  }
8345	else
8346	  {
8347	    op1 = gen_label_rtx ();
8348	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8349
8350	    start_cleanup_deferral ();
8351
8352	    /* One branch of the cond can be void if it never returns.  For
8353	       example, A ? throw : E.  */
8354	    if (temp != 0
8355		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8356	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
8357	    else
8358	      expand_expr (TREE_OPERAND (exp, 1),
8359			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8360	    end_cleanup_deferral ();
8361	    emit_queue ();
8362	    emit_jump_insn (gen_jump (op1));
8363	    emit_barrier ();
8364	    emit_label (op0);
8365	    start_cleanup_deferral ();
8366	    if (temp != 0
8367		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8368	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
8369	    else
8370	      expand_expr (TREE_OPERAND (exp, 2),
8371			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8372	  }
8373
8374	end_cleanup_deferral ();
8375
8376	emit_queue ();
8377	emit_label (op1);
8378	OK_DEFER_POP;
8379
8380	return temp;
8381      }
8382
8383    case TARGET_EXPR:
8384      {
8385	/* Something needs to be initialized, but we didn't know
8386	   where that thing was when building the tree.  For example,
8387	   it could be the return value of a function, or a parameter
8388	   to a function that is laid out on the stack, or a temporary
8389	   variable which must be passed by reference.
8390
8391	   We guarantee that the expression will either be constructed
8392	   or copied into our original target.  */
8393
8394	tree slot = TREE_OPERAND (exp, 0);
8395	tree cleanups = NULL_TREE;
8396	tree exp1;
8397
8398	if (TREE_CODE (slot) != VAR_DECL)
8399	  abort ();
8400
8401	if (! ignore)
8402	  target = original_target;
8403
8404	/* Set this here so that if we get a target that refers to a
8405	   register variable that's already been used, put_reg_into_stack
8406	   knows that it should fix up those uses.  */
8407	TREE_USED (slot) = 1;
8408
8409	if (target == 0)
8410	  {
8411	    if (DECL_RTL_SET_P (slot))
8412	      {
8413		target = DECL_RTL (slot);
8414		/* If we have already expanded the slot, don't do
8415		   it again.  (mrs)  */
8416		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8417		  return target;
8418	      }
8419	    else
8420	      {
8421		target = assign_temp (type, 2, 0, 1);
8422		/* All temp slots at this level must not conflict.  */
8423		preserve_temp_slots (target);
8424		SET_DECL_RTL (slot, target);
8425		if (TREE_ADDRESSABLE (slot))
8426		  put_var_into_stack (slot);
8427
8428		/* Since SLOT is not known to the called function
8429		   to belong to its stack frame, we must build an explicit
8430		   cleanup.  This case occurs when we must build up a reference
8431		   to pass the reference as an argument.  In this case,
8432		   it is very likely that such a reference need not be
8433		   built here.  */
8434
8435		if (TREE_OPERAND (exp, 2) == 0)
8436		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8437		cleanups = TREE_OPERAND (exp, 2);
8438	      }
8439	  }
8440	else
8441	  {
8442	    /* This case does occur when expanding a parameter which
8443	       needs to be constructed on the stack.  The target
8444	       is the actual stack address that we want to initialize.
8445	       The function we call will perform the cleanup in this case.  */
8446
8447	    /* If we have already assigned it space, use that space,
8448	       not the target that we were passed in, as our target
8449	       parameter is only a hint.  */
8450	    if (DECL_RTL_SET_P (slot))
8451	      {
8452		target = DECL_RTL (slot);
8453		/* If we have already expanded the slot, don't do
8454                   it again.  (mrs)  */
8455		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8456		  return target;
8457	      }
8458	    else
8459	      {
8460		SET_DECL_RTL (slot, target);
8461		/* If we must have an addressable slot, then make sure that
8462		   the RTL that we just stored in slot is OK.  */
8463		if (TREE_ADDRESSABLE (slot))
8464		  put_var_into_stack (slot);
8465	      }
8466	  }
8467
8468	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8469	/* Mark it as expanded.  */
8470	TREE_OPERAND (exp, 1) = NULL_TREE;
8471
8472	store_expr (exp1, target, 0);
8473
8474	expand_decl_cleanup (NULL_TREE, cleanups);
8475
8476	return target;
8477      }
8478
8479    case INIT_EXPR:
8480      {
8481	tree lhs = TREE_OPERAND (exp, 0);
8482	tree rhs = TREE_OPERAND (exp, 1);
8483
8484	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8485	return temp;
8486      }
8487
8488    case MODIFY_EXPR:
8489      {
8490	/* If lhs is complex, expand calls in rhs before computing it.
8491	   That's so we don't compute a pointer and save it over a
8492	   call.  If lhs is simple, compute it first so we can give it
8493	   as a target if the rhs is just a call.  This avoids an
8494	   extra temp and copy, and prevents a partial subsumption
8495	   that makes bad code.  Actually we could treat
8496	   component_ref's of vars like vars.  */
8497
8498	tree lhs = TREE_OPERAND (exp, 0);
8499	tree rhs = TREE_OPERAND (exp, 1);
8500
8501	temp = 0;
8502
8503	/* Check for |= or &= of a bitfield of size one into another bitfield
8504	   of size 1.  In this case, (unless we need the result of the
8505	   assignment) we can do this more efficiently with a
8506	   test followed by an assignment, if necessary.
8507
8508	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8509	   things change so we do, this code should be enhanced to
8510	   support it.  */
8511	if (ignore
8512	    && TREE_CODE (lhs) == COMPONENT_REF
8513	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8514		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8515	    && TREE_OPERAND (rhs, 0) == lhs
8516	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8517	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8518	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8519	  {
8520	    rtx label = gen_label_rtx ();
8521
8522	    do_jump (TREE_OPERAND (rhs, 1),
8523		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8524		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8525	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8526					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8527					      ? integer_one_node
8528					      : integer_zero_node)),
8529			       0, 0);
8530	    do_pending_stack_adjust ();
8531	    emit_label (label);
8532	    return const0_rtx;
8533	  }
8534
8535	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8536
8537	return temp;
8538      }
8539
8540    case RETURN_EXPR:
8541      if (!TREE_OPERAND (exp, 0))
8542	expand_null_return ();
8543      else
8544	expand_return (TREE_OPERAND (exp, 0));
8545      return const0_rtx;
8546
8547    case PREINCREMENT_EXPR:
8548    case PREDECREMENT_EXPR:
8549      return expand_increment (exp, 0, ignore);
8550
8551    case POSTINCREMENT_EXPR:
8552    case POSTDECREMENT_EXPR:
8553      /* Faster to treat as pre-increment if result is not used.  */
8554      return expand_increment (exp, ! ignore, ignore);
8555
8556    case ADDR_EXPR:
8557      /* Are we taking the address of a nested function?  */
8558      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8559	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8560	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8561	  && ! TREE_STATIC (exp))
8562	{
8563	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8564	  op0 = force_operand (op0, target);
8565	}
8566      /* If we are taking the address of something erroneous, just
8567	 return a zero.  */
8568      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8569	return const0_rtx;
8570      /* If we are taking the address of a constant and are at the
8571	 top level, we have to use output_constant_def since we can't
8572	 call force_const_mem at top level.  */
8573      else if (cfun == 0
8574	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8575		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8576		       == 'c')))
8577	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8578      else
8579	{
8580	  /* We make sure to pass const0_rtx down if we came in with
8581	     ignore set, to avoid doing the cleanups twice for something.  */
8582	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8583			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8584			     (modifier == EXPAND_INITIALIZER
8585			      ? modifier : EXPAND_CONST_ADDRESS));
8586
8587	  /* If we are going to ignore the result, OP0 will have been set
8588	     to const0_rtx, so just return it.  Don't get confused and
8589	     think we are taking the address of the constant.  */
8590	  if (ignore)
8591	    return op0;
8592
8593	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8594	     clever and returns a REG when given a MEM.  */
8595	  op0 = protect_from_queue (op0, 1);
8596
8597	  /* We would like the object in memory.  If it is a constant, we can
8598	     have it be statically allocated into memory.  For a non-constant,
8599	     we need to allocate some memory and store the value into it.  */
8600
8601	  if (CONSTANT_P (op0))
8602	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8603				   op0);
8604	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8605		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8606		   || GET_CODE (op0) == PARALLEL)
8607	    {
8608	      /* If the operand is a SAVE_EXPR, we can deal with this by
8609		 forcing the SAVE_EXPR into memory.  */
8610	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8611		{
8612		  put_var_into_stack (TREE_OPERAND (exp, 0));
8613		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8614		}
8615	      else
8616		{
8617		  /* If this object is in a register, it can't be BLKmode.  */
8618		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8619		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
8620
8621		  if (GET_CODE (op0) == PARALLEL)
8622		    /* Handle calls that pass values in multiple
8623		       non-contiguous locations.  The Irix 6 ABI has examples
8624		       of this.  */
8625		    emit_group_store (memloc, op0,
8626				      int_size_in_bytes (inner_type));
8627		  else
8628		    emit_move_insn (memloc, op0);
8629
8630		  op0 = memloc;
8631		}
8632	    }
8633
8634	  if (GET_CODE (op0) != MEM)
8635	    abort ();
8636
8637	  mark_temp_addr_taken (op0);
8638	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8639	    {
8640	      op0 = XEXP (op0, 0);
8641#ifdef POINTERS_EXTEND_UNSIGNED
8642	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8643		  && mode == ptr_mode)
8644		op0 = convert_memory_address (ptr_mode, op0);
8645#endif
8646	      return op0;
8647	    }
8648
8649	  /* If OP0 is not aligned at least as much as the type requires, we
8650	     need to make a temporary, copy OP0 to it, and take the address of
8651	     the temporary.  We want to use the alignment of the type, not of
8652	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
8653	     the test for BLKmode means that can't happen.  The test for
8654	     BLKmode is because we never make mis-aligned MEMs with
8655	     non-BLKmode.
8656
8657	     We don't need to do this at all if the machine doesn't have
8658	     strict alignment.  */
8659	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8660	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8661		  > MEM_ALIGN (op0))
8662	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8663	    {
8664	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8665	      rtx new
8666		= assign_stack_temp_for_type
8667		  (TYPE_MODE (inner_type),
8668		   MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8669		   : int_size_in_bytes (inner_type),
8670		   1, build_qualified_type (inner_type,
8671					    (TYPE_QUALS (inner_type)
8672					     | TYPE_QUAL_CONST)));
8673
8674	      if (TYPE_ALIGN_OK (inner_type))
8675		abort ();
8676
8677	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8678	      op0 = new;
8679	    }
8680
8681	  op0 = force_operand (XEXP (op0, 0), target);
8682	}
8683
8684      if (flag_force_addr
8685	  && GET_CODE (op0) != REG
8686	  && modifier != EXPAND_CONST_ADDRESS
8687	  && modifier != EXPAND_INITIALIZER
8688	  && modifier != EXPAND_SUM)
8689	op0 = force_reg (Pmode, op0);
8690
8691      if (GET_CODE (op0) == REG
8692	  && ! REG_USERVAR_P (op0))
8693	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8694
8695#ifdef POINTERS_EXTEND_UNSIGNED
8696      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8697	  && mode == ptr_mode)
8698	op0 = convert_memory_address (ptr_mode, op0);
8699#endif
8700
8701      return op0;
8702
8703    case ENTRY_VALUE_EXPR:
8704      abort ();
8705
8706    /* COMPLEX type for Extended Pascal & Fortran  */
8707    case COMPLEX_EXPR:
8708      {
8709	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8710	rtx insns;
8711
8712	/* Get the rtx code of the operands.  */
8713	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8714	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8715
8716	if (! target)
8717	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8718
8719	start_sequence ();
8720
8721	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8722	emit_move_insn (gen_realpart (mode, target), op0);
8723	emit_move_insn (gen_imagpart (mode, target), op1);
8724
8725	insns = get_insns ();
8726	end_sequence ();
8727
8728	/* Complex construction should appear as a single unit.  */
8729	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8730	   each with a separate pseudo as destination.
8731	   It's not correct for flow to treat them as a unit.  */
8732	if (GET_CODE (target) != CONCAT)
8733	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8734	else
8735	  emit_insns (insns);
8736
8737	return target;
8738      }
8739
8740    case REALPART_EXPR:
8741      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8742      return gen_realpart (mode, op0);
8743
8744    case IMAGPART_EXPR:
8745      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8746      return gen_imagpart (mode, op0);
8747
8748    case CONJ_EXPR:
8749      {
8750	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8751	rtx imag_t;
8752	rtx insns;
8753
8754	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8755
8756	if (! target)
8757	  target = gen_reg_rtx (mode);
8758
8759	start_sequence ();
8760
8761	/* Store the realpart and the negated imagpart to target.  */
8762	emit_move_insn (gen_realpart (partmode, target),
8763			gen_realpart (partmode, op0));
8764
8765	imag_t = gen_imagpart (partmode, target);
8766	temp = expand_unop (partmode,
8767                            ! unsignedp && flag_trapv
8768                            && (GET_MODE_CLASS(partmode) == MODE_INT)
8769                            ? negv_optab : neg_optab,
8770			    gen_imagpart (partmode, op0), imag_t, 0);
8771	if (temp != imag_t)
8772	  emit_move_insn (imag_t, temp);
8773
8774	insns = get_insns ();
8775	end_sequence ();
8776
8777	/* Conjugate should appear as a single unit.
8778	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8779	   each with a separate pseudo as destination.
8780	   It's not correct for flow to treat them as a unit.  */
8781	if (GET_CODE (target) != CONCAT)
8782	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8783	else
8784	  emit_insns (insns);
8785
8786	return target;
8787      }
8788
8789    case TRY_CATCH_EXPR:
8790      {
8791	tree handler = TREE_OPERAND (exp, 1);
8792
8793	expand_eh_region_start ();
8794
8795	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8796
8797	expand_eh_region_end_cleanup (handler);
8798
8799	return op0;
8800      }
8801
8802    case TRY_FINALLY_EXPR:
8803      {
8804	tree try_block = TREE_OPERAND (exp, 0);
8805	tree finally_block = TREE_OPERAND (exp, 1);
8806	rtx finally_label = gen_label_rtx ();
8807	rtx done_label = gen_label_rtx ();
8808	rtx return_link = gen_reg_rtx (Pmode);
8809	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8810			      (tree) finally_label, (tree) return_link);
8811	TREE_SIDE_EFFECTS (cleanup) = 1;
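
	/* Illustrative note (not in the original source): the code below
	   lays out <try block>, a jump to DONE_LABEL, then FINALLY_LABEL:
	   <finally block> ending in an indirect jump through RETURN_LINK,
	   then DONE_LABEL.  Every exit from the try block runs the
	   GOTO_SUBROUTINE_EXPR cleanup, which stores its resume address in
	   RETURN_LINK and jumps to FINALLY_LABEL.  */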
8812
8813	/* Start a new binding layer that will keep track of all cleanup
8814	   actions to be performed.  */
8815	expand_start_bindings (2);
8816
8817	target_temp_slot_level = temp_slot_level;
8818
8819	expand_decl_cleanup (NULL_TREE, cleanup);
8820	op0 = expand_expr (try_block, target, tmode, modifier);
8821
8822	preserve_temp_slots (op0);
8823	expand_end_bindings (NULL_TREE, 0, 0);
8824	emit_jump (done_label);
8825	emit_label (finally_label);
8826	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8827	emit_indirect_jump (return_link);
8828	emit_label (done_label);
8829	return op0;
8830      }
8831
8832    case GOTO_SUBROUTINE_EXPR:
8833      {
8834	rtx subr = (rtx) TREE_OPERAND (exp, 0);
8835	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8836	rtx return_address = gen_label_rtx ();
8837	emit_move_insn (return_link,
8838			gen_rtx_LABEL_REF (Pmode, return_address));
8839	emit_jump (subr);
8840	emit_label (return_address);
8841	return const0_rtx;
8842      }
8843
8844    case VA_ARG_EXPR:
8845      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8846
8847    case EXC_PTR_EXPR:
8848      return get_exception_pointer (cfun);
8849
8850    case FDESC_EXPR:
8851      /* Function descriptors are not valid except for as
8852	 initialization constants, and should not be expanded.  */
8853      abort ();
8854
8855    default:
8856      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8857    }
8858
8859  /* Here to do an ordinary binary operator, generating an instruction
8860     from the optab already placed in `this_optab'.  */
8861 binop:
8862  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8863    subtarget = 0;
8864  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8865  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8866 binop2:
8867  temp = expand_binop (mode, this_optab, op0, op1, target,
8868		       unsignedp, OPTAB_LIB_WIDEN);
8869  if (temp == 0)
8870    abort ();
8871  return temp;
8872}
8873
8874/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8875   when applied to the address of EXP produces an address known to be
8876   aligned more than BIGGEST_ALIGNMENT.  */
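
/* Illustrative note (not in the original source): the OFFSET accepted below
   has the shape ((- (sizetype) &EXP) & (ALIGN - 1)) for some power-of-two
   ALIGN whose mask (ALIGN - 1) is larger than BIGGEST_ALIGNMENT, i.e. the
   amount that rounds the address of EXP up to the next ALIGN boundary.  */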
8877
8878static int
8879is_aligning_offset (offset, exp)
8880     tree offset;
8881     tree exp;
8882{
8883  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
8884  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8885	 || TREE_CODE (offset) == NOP_EXPR
8886	 || TREE_CODE (offset) == CONVERT_EXPR
8887	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
8888    offset = TREE_OPERAND (offset, 0);
8889
8890  /* We must now have a BIT_AND_EXPR with a constant that is one less than
8891     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8892  if (TREE_CODE (offset) != BIT_AND_EXPR
8893      || !host_integerp (TREE_OPERAND (offset, 1), 1)
8894      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
8895      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8896    return 0;
8897
8898  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8899     It must be NEGATE_EXPR.  Then strip any more conversions.  */
8900  offset = TREE_OPERAND (offset, 0);
8901  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8902	 || TREE_CODE (offset) == NOP_EXPR
8903	 || TREE_CODE (offset) == CONVERT_EXPR)
8904    offset = TREE_OPERAND (offset, 0);
8905
8906  if (TREE_CODE (offset) != NEGATE_EXPR)
8907    return 0;
8908
8909  offset = TREE_OPERAND (offset, 0);
8910  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8911	 || TREE_CODE (offset) == NOP_EXPR
8912	 || TREE_CODE (offset) == CONVERT_EXPR)
8913    offset = TREE_OPERAND (offset, 0);
8914
8915  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
8916     whose type is the same as EXP.  */
8917  return (TREE_CODE (offset) == ADDR_EXPR
8918	  && (TREE_OPERAND (offset, 0) == exp
8919	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
8920		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
8921		      == TREE_TYPE (exp)))));
8922}
8923
8924/* Return the tree node if ARG corresponds to a string constant or zero
8925   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
8926   in bytes within the string that ARG is accessing.  The type of the
8927   offset will be `sizetype'.  */
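
/* Illustrative examples (not in the original source): an ADDR_EXPR wrapping
   a STRING_CST (e.g. the address of "hello") is returned with *PTR_OFFSET
   set to zero; a PLUS_EXPR such as "hello" + i yields the same STRING_CST
   with *PTR_OFFSET set to i converted to `sizetype'.  */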
8928
8929tree
8930string_constant (arg, ptr_offset)
8931     tree arg;
8932     tree *ptr_offset;
8933{
8934  STRIP_NOPS (arg);
8935
8936  if (TREE_CODE (arg) == ADDR_EXPR
8937      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8938    {
8939      *ptr_offset = size_zero_node;
8940      return TREE_OPERAND (arg, 0);
8941    }
8942  else if (TREE_CODE (arg) == PLUS_EXPR)
8943    {
8944      tree arg0 = TREE_OPERAND (arg, 0);
8945      tree arg1 = TREE_OPERAND (arg, 1);
8946
8947      STRIP_NOPS (arg0);
8948      STRIP_NOPS (arg1);
8949
8950      if (TREE_CODE (arg0) == ADDR_EXPR
8951	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8952	{
8953	  *ptr_offset = convert (sizetype, arg1);
8954	  return TREE_OPERAND (arg0, 0);
8955	}
8956      else if (TREE_CODE (arg1) == ADDR_EXPR
8957	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8958	{
8959	  *ptr_offset = convert (sizetype, arg0);
8960	  return TREE_OPERAND (arg1, 0);
8961	}
8962    }
8963
8964  return 0;
8965}
8966
8967/* Expand code for a post- or pre- increment or decrement
8968   and return the RTX for the result.
8969   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
8970
8971static rtx
8972expand_increment (exp, post, ignore)
8973     tree exp;
8974     int post, ignore;
8975{
8976  rtx op0, op1;
8977  rtx temp, value;
8978  tree incremented = TREE_OPERAND (exp, 0);
8979  optab this_optab = add_optab;
8980  int icode;
8981  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8982  int op0_is_copy = 0;
8983  int single_insn = 0;
8984  /* 1 means we can't store into OP0 directly,
8985     because it is a subreg narrower than a word,
8986     and we don't dare clobber the rest of the word.  */
8987  int bad_subreg = 0;
8988
8989  /* Stabilize any component ref that might need to be
8990     evaluated more than once below.  */
8991  if (!post
8992      || TREE_CODE (incremented) == BIT_FIELD_REF
8993      || (TREE_CODE (incremented) == COMPONENT_REF
8994	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8995	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8996    incremented = stabilize_reference (incremented);
8997  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
8998     ones into save exprs so that they don't accidentally get evaluated
8999     more than once by the code below.  */
9000  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9001      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9002    incremented = save_expr (incremented);
9003
9004  /* Compute the operands as RTX.
9005     Note whether OP0 is the actual lvalue or a copy of it:
9006     I believe it is a copy iff it is a register or subreg
9007     and insns were generated in computing it.  */
9008
9009  temp = get_last_insn ();
9010  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9011
9012  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9013     in place but instead must do sign- or zero-extension during assignment,
9014     so we copy it into a new register and let the code below use it as
9015     a copy.
9016
9017     Note that we can safely modify this SUBREG since it is known not to be
9018     shared (it was made by the expand_expr call above).  */
9019
9020  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9021    {
9022      if (post)
9023	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9024      else
9025	bad_subreg = 1;
9026    }
9027  else if (GET_CODE (op0) == SUBREG
9028	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9029    {
9030      /* We cannot increment this SUBREG in place.  If we are
9031	 post-incrementing, get a copy of the old value.  Otherwise,
9032	 just mark that we cannot increment in place.  */
9033      if (post)
9034	op0 = copy_to_reg (op0);
9035      else
9036	bad_subreg = 1;
9037    }
9038
9039  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9040		 && temp != get_last_insn ());
9041  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9042
9043  /* Decide whether incrementing or decrementing.  */
9044  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9045      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9046    this_optab = sub_optab;
9047
9048  /* Convert decrement by a constant into a negative increment.  */
9049  if (this_optab == sub_optab
9050      && GET_CODE (op1) == CONST_INT)
9051    {
9052      op1 = GEN_INT (-INTVAL (op1));
9053      this_optab = add_optab;
9054    }
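
  /* Illustrative example (not in the original source): a post-decrement
     `p--' on an `int *' typically arrives here as sub_optab with op1 == 4
     (the element size) and is rewritten as add_optab with op1 == -4, so
     constant decrements share the addition paths below.  */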
9055
9056  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9057    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9058
9059  /* For a preincrement, see if we can do this with a single instruction.  */
9060  if (!post)
9061    {
9062      icode = (int) this_optab->handlers[(int) mode].insn_code;
9063      if (icode != (int) CODE_FOR_nothing
9064	  /* Make sure that OP0 is valid for operands 0 and 1
9065	     of the insn we want to queue.  */
9066	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9067	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9068	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9069	single_insn = 1;
9070    }
9071
9072  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9073     then we cannot just increment OP0.  We must therefore contrive to
9074     increment the original value.  Then, for postincrement, we can return
9075     OP0 since it is a copy of the old value.  For preincrement, expand here
9076     unless we can do it with a single insn.
9077
9078     Likewise if storing directly into OP0 would clobber high bits
9079     we need to preserve (bad_subreg).  */
9080  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9081    {
9082      /* This is the easiest way to increment the value wherever it is.
9083	 Problems with multiple evaluation of INCREMENTED are prevented
9084	 because either (1) it is a component_ref or preincrement,
9085	 in which case it was stabilized above, or (2) it is an array_ref
9086	 with constant index in an array in a register, which is
9087	 safe to reevaluate.  */
9088      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9089			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9090			    ? MINUS_EXPR : PLUS_EXPR),
9091			   TREE_TYPE (exp),
9092			   incremented,
9093			   TREE_OPERAND (exp, 1));
9094
9095      while (TREE_CODE (incremented) == NOP_EXPR
9096	     || TREE_CODE (incremented) == CONVERT_EXPR)
9097	{
9098	  newexp = convert (TREE_TYPE (incremented), newexp);
9099	  incremented = TREE_OPERAND (incremented, 0);
9100	}
9101
9102      temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9103      return post ? op0 : temp;
9104    }
9105
9106  if (post)
9107    {
9108      /* We have a true reference to the value in OP0.
9109	 If there is an insn to add or subtract in this mode, queue it.
9110	 Queueing the increment insn avoids the register shuffling
9111	 that often results if we must increment now and first save
9112	 the old value for subsequent use.  */
9113
9114#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9115      op0 = stabilize (op0);
9116#endif
9117
9118      icode = (int) this_optab->handlers[(int) mode].insn_code;
9119      if (icode != (int) CODE_FOR_nothing
9120	  /* Make sure that OP0 is valid for operands 0 and 1
9121	     of the insn we want to queue.  */
9122	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9123	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9124	{
9125	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9126	    op1 = force_reg (mode, op1);
9127
9128	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9129	}
9130      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9131	{
9132	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9133		      ? force_reg (Pmode, XEXP (op0, 0))
9134		      : copy_to_reg (XEXP (op0, 0)));
9135	  rtx temp, result;
9136
9137	  op0 = replace_equiv_address (op0, addr);
9138	  temp = force_reg (GET_MODE (op0), op0);
9139	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9140	    op1 = force_reg (mode, op1);
9141
9142	  /* The increment queue is LIFO, thus we have to `queue'
9143	     the instructions in reverse order.  */
9144	  enqueue_insn (op0, gen_move_insn (op0, temp));
9145	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9146	  return result;
9147	}
9148    }
9149
9150  /* Preincrement, or we can't increment with one simple insn.  */
9151  if (post)
9152    /* Save a copy of the value before inc or dec, to return it later.  */
9153    temp = value = copy_to_reg (op0);
9154  else
9155    /* Arrange to return the incremented value.  */
9156    /* Copy the rtx because expand_binop will protect from the queue,
9157       and the results of that would be invalid for us to return
9158       if our caller does emit_queue before using our result.  */
9159    temp = copy_rtx (value = op0);
9160
9161  /* Increment however we can.  */
9162  op1 = expand_binop (mode, this_optab, value, op1, op0,
9163		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9164
9165  /* Make sure the value is stored into OP0.  */
9166  if (op1 != op0)
9167    emit_move_insn (op0, op1);
9168
9169  return temp;
9170}
9171
9172/* At the start of a function, record that we have no previously-pushed
9173   arguments waiting to be popped.  */
9174
9175void
9176init_pending_stack_adjust ()
9177{
9178  pending_stack_adjust = 0;
9179}
9180
9181/* When exiting from a function, if safe, clear out any pending stack adjust
9182   so the adjustment won't get done.
9183
9184   Note, if the current function calls alloca, then it must have a
9185   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9186
9187void
9188clear_pending_stack_adjust ()
9189{
9190#ifdef EXIT_IGNORE_STACK
9191  if (optimize > 0
9192      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9193      && EXIT_IGNORE_STACK
9194      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9195      && ! flag_inline_functions)
9196    {
9197      stack_pointer_delta -= pending_stack_adjust,
9198      pending_stack_adjust = 0;
9199    }
9200#endif
9201}
9202
9203/* Pop any previously-pushed arguments that have not been popped yet.  */
9204
9205void
9206do_pending_stack_adjust ()
9207{
9208  if (inhibit_defer_pop == 0)
9209    {
9210      if (pending_stack_adjust != 0)
9211	adjust_stack (GEN_INT (pending_stack_adjust));
9212      pending_stack_adjust = 0;
9213    }
9214}
9215
9216/* Expand conditional expressions.  */
9217
9218/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9219   LABEL is an rtx of code CODE_LABEL, in this function and all the
9220   functions here.  */
9221
9222void
9223jumpifnot (exp, label)
9224     tree exp;
9225     rtx label;
9226{
9227  do_jump (exp, label, NULL_RTX);
9228}
9229
9230/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9231
9232void
9233jumpif (exp, label)
9234     tree exp;
9235     rtx label;
9236{
9237  do_jump (exp, NULL_RTX, label);
9238}
9239
9240/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9241   the result is zero, or IF_TRUE_LABEL if the result is one.
9242   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9243   meaning fall through in that case.
9244
9245   do_jump always does any pending stack adjust except when it does not
9246   actually perform a jump.  An example where there is no jump
9247   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9248
9249   This function is responsible for optimizing cases such as
9250   &&, || and comparison operators in EXP.  */
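
/* Illustrative example (not in the original source): for a condition such
   as `a && b', the TRUTH_ANDIF_EXPR case below first does
   do_jump (a, if_false_label, NULL_RTX) and then do_jump (b, if_false_label,
   if_true_label), so B is never tested once A has already jumped to the
   false label.  */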
9251
9252void
9253do_jump (exp, if_false_label, if_true_label)
9254     tree exp;
9255     rtx if_false_label, if_true_label;
9256{
9257  enum tree_code code = TREE_CODE (exp);
9258  /* Some cases need to create a label to jump to
9259     in order to properly fall through.
9260     These cases set DROP_THROUGH_LABEL nonzero.  */
9261  rtx drop_through_label = 0;
9262  rtx temp;
9263  int i;
9264  tree type;
9265  enum machine_mode mode;
9266
9267#ifdef MAX_INTEGER_COMPUTATION_MODE
9268  check_max_integer_computation_mode (exp);
9269#endif
9270
9271  emit_queue ();
9272
9273  switch (code)
9274    {
9275    case ERROR_MARK:
9276      break;
9277
9278    case INTEGER_CST:
9279      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9280      if (temp)
9281	emit_jump (temp);
9282      break;
9283
9284#if 0
9285      /* This is not true with #pragma weak  */
9286    case ADDR_EXPR:
9287      /* The address of something can never be zero.  */
9288      if (if_true_label)
9289	emit_jump (if_true_label);
9290      break;
9291#endif
9292
9293    case NOP_EXPR:
9294      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9295	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9296	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9297	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9298	goto normal;
9299    case CONVERT_EXPR:
9300      /* If we are narrowing the operand, we have to do the compare in the
9301	 narrower mode.  */
9302      if ((TYPE_PRECISION (TREE_TYPE (exp))
9303	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9304	goto normal;
9305    case NON_LVALUE_EXPR:
9306    case REFERENCE_EXPR:
9307    case ABS_EXPR:
9308    case NEGATE_EXPR:
9309    case LROTATE_EXPR:
9310    case RROTATE_EXPR:
9311      /* These cannot change zero->non-zero or vice versa.  */
9312      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9313      break;
9314
9315    case WITH_RECORD_EXPR:
9316      /* Put the object on the placeholder list, recurse through our first
9317	 operand, and pop the list.  */
9318      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9319				    placeholder_list);
9320      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9321      placeholder_list = TREE_CHAIN (placeholder_list);
9322      break;
9323
9324#if 0
9325      /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9326	 a test and can be longer if the test is eliminated.  */
9327    case PLUS_EXPR:
9328      /* Reduce to minus.  */
9329      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9330		   TREE_OPERAND (exp, 0),
9331		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9332				 TREE_OPERAND (exp, 1))));
9333      /* Process as MINUS.  */
9334#endif
9335
9336    case MINUS_EXPR:
9337      /* Non-zero iff operands of minus differ.  */
9338      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9339				  TREE_OPERAND (exp, 0),
9340				  TREE_OPERAND (exp, 1)),
9341			   NE, NE, if_false_label, if_true_label);
9342      break;
9343
9344    case BIT_AND_EXPR:
9345      /* If we are AND'ing with a small constant, do this comparison in the
9346	 smallest type that fits.  If the machine doesn't have comparisons
9347	 that small, it will be converted back to the wider comparison.
9348	 This helps if we are testing the sign bit of a narrower object.
9349	 combine can't do this for us because it can't know whether a
9350	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
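
      /* Illustrative example (not in the original source): a test such as
	 `(x & 0x80) != 0' where X is an `int' has tree_floor_log2 == 7, so
	 when ! SLOW_BYTE_ACCESS and QImode has a compare insn, the jump can
	 be done on the expression converted to the 8-bit type instead.  */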
9351
9352      if (! SLOW_BYTE_ACCESS
9353	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9354	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9355	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9356	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9357	  && (type = type_for_mode (mode, 1)) != 0
9358	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9359	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9360	      != CODE_FOR_nothing))
9361	{
9362	  do_jump (convert (type, exp), if_false_label, if_true_label);
9363	  break;
9364	}
9365      goto normal;
9366
9367    case TRUTH_NOT_EXPR:
9368      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9369      break;
9370
9371    case TRUTH_ANDIF_EXPR:
9372      if (if_false_label == 0)
9373	if_false_label = drop_through_label = gen_label_rtx ();
9374      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9375      start_cleanup_deferral ();
9376      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9377      end_cleanup_deferral ();
9378      break;
9379
9380    case TRUTH_ORIF_EXPR:
9381      if (if_true_label == 0)
9382	if_true_label = drop_through_label = gen_label_rtx ();
9383      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9384      start_cleanup_deferral ();
9385      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9386      end_cleanup_deferral ();
9387      break;
9388
9389    case COMPOUND_EXPR:
9390      push_temp_slots ();
9391      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9392      preserve_temp_slots (NULL_RTX);
9393      free_temp_slots ();
9394      pop_temp_slots ();
9395      emit_queue ();
9396      do_pending_stack_adjust ();
9397      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9398      break;
9399
9400    case COMPONENT_REF:
9401    case BIT_FIELD_REF:
9402    case ARRAY_REF:
9403    case ARRAY_RANGE_REF:
9404      {
9405	HOST_WIDE_INT bitsize, bitpos;
9406	int unsignedp;
9407	enum machine_mode mode;
9408	tree type;
9409	tree offset;
9410	int volatilep = 0;
9411
9412	/* Get description of this reference.  We don't actually care
9413	   about the underlying object here.  */
9414	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9415			     &unsignedp, &volatilep);
9416
9417	type = type_for_size (bitsize, unsignedp);
9418	if (! SLOW_BYTE_ACCESS
9419	    && type != 0 && bitsize >= 0
9420	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9421	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9422		!= CODE_FOR_nothing))
9423	  {
9424	    do_jump (convert (type, exp), if_false_label, if_true_label);
9425	    break;
9426	  }
9427	goto normal;
9428      }
9429
9430    case COND_EXPR:
9431      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9432      if (integer_onep (TREE_OPERAND (exp, 1))
9433	  && integer_zerop (TREE_OPERAND (exp, 2)))
9434	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9435
9436      else if (integer_zerop (TREE_OPERAND (exp, 1))
9437	       && integer_onep (TREE_OPERAND (exp, 2)))
9438	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9439
9440      else
9441	{
9442	  rtx label1 = gen_label_rtx ();
9443	  drop_through_label = gen_label_rtx ();
9444
9445	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9446
9447	  start_cleanup_deferral ();
9448	  /* Now the THEN-expression.  */
9449	  do_jump (TREE_OPERAND (exp, 1),
9450		   if_false_label ? if_false_label : drop_through_label,
9451		   if_true_label ? if_true_label : drop_through_label);
9452	  /* In case the do_jump just above never jumps.  */
9453	  do_pending_stack_adjust ();
9454	  emit_label (label1);
9455
9456	  /* Now the ELSE-expression.  */
9457	  do_jump (TREE_OPERAND (exp, 2),
9458		   if_false_label ? if_false_label : drop_through_label,
9459		   if_true_label ? if_true_label : drop_through_label);
9460	  end_cleanup_deferral ();
9461	}
9462      break;
9463
9464    case EQ_EXPR:
9465      {
9466	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9467
9468	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9469	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9470	  {
9471	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9472	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9473	    do_jump
9474	      (fold
9475	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9476		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9477				    fold (build1 (REALPART_EXPR,
9478						  TREE_TYPE (inner_type),
9479						  exp0)),
9480				    fold (build1 (REALPART_EXPR,
9481						  TREE_TYPE (inner_type),
9482						  exp1)))),
9483		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9484				    fold (build1 (IMAGPART_EXPR,
9485						  TREE_TYPE (inner_type),
9486						  exp0)),
9487				    fold (build1 (IMAGPART_EXPR,
9488						  TREE_TYPE (inner_type),
9489						  exp1)))))),
9490	       if_false_label, if_true_label);
9491	  }
9492
9493	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9494	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9495
9496	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9497		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9498	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9499	else
9500	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9501	break;
9502      }
9503
9504    case NE_EXPR:
9505      {
9506	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9507
9508	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9509	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9510	  {
9511	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9512	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9513	    do_jump
9514	      (fold
9515	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9516		       fold (build (NE_EXPR, TREE_TYPE (exp),
9517				    fold (build1 (REALPART_EXPR,
9518						  TREE_TYPE (inner_type),
9519						  exp0)),
9520				    fold (build1 (REALPART_EXPR,
9521						  TREE_TYPE (inner_type),
9522						  exp1)))),
9523		       fold (build (NE_EXPR, TREE_TYPE (exp),
9524				    fold (build1 (IMAGPART_EXPR,
9525						  TREE_TYPE (inner_type),
9526						  exp0)),
9527				    fold (build1 (IMAGPART_EXPR,
9528						  TREE_TYPE (inner_type),
9529						  exp1)))))),
9530	       if_false_label, if_true_label);
9531	  }
9532
9533	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9534	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9535
9536	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9537		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9538	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9539	else
9540	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9541	break;
9542      }
9543
9544    case LT_EXPR:
9545      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9546      if (GET_MODE_CLASS (mode) == MODE_INT
9547	  && ! can_compare_p (LT, mode, ccp_jump))
9548	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9549      else
9550	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9551      break;
9552
9553    case LE_EXPR:
9554      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9555      if (GET_MODE_CLASS (mode) == MODE_INT
9556	  && ! can_compare_p (LE, mode, ccp_jump))
9557	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9558      else
9559	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9560      break;
9561
9562    case GT_EXPR:
9563      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9564      if (GET_MODE_CLASS (mode) == MODE_INT
9565	  && ! can_compare_p (GT, mode, ccp_jump))
9566	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9567      else
9568	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9569      break;
9570
9571    case GE_EXPR:
9572      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9573      if (GET_MODE_CLASS (mode) == MODE_INT
9574	  && ! can_compare_p (GE, mode, ccp_jump))
9575	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9576      else
9577	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9578      break;
9579
9580    case UNORDERED_EXPR:
9581    case ORDERED_EXPR:
9582      {
9583	enum rtx_code cmp, rcmp;
9584	int do_rev;
9585
9586	if (code == UNORDERED_EXPR)
9587	  cmp = UNORDERED, rcmp = ORDERED;
9588	else
9589	  cmp = ORDERED, rcmp = UNORDERED;
9590	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9591
9592	do_rev = 0;
9593	if (! can_compare_p (cmp, mode, ccp_jump)
9594	    && (can_compare_p (rcmp, mode, ccp_jump)
9595		/* If the target doesn't provide either UNORDERED or ORDERED
9596		   comparisons, canonicalize on UNORDERED for the library.  */
9597		|| rcmp == UNORDERED))
9598	  do_rev = 1;
9599
9600        if (! do_rev)
9601	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9602	else
9603	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9604      }
9605      break;
9606
9607    {
9608      enum rtx_code rcode1;
9609      enum tree_code tcode2;
9610
9611      case UNLT_EXPR:
9612	rcode1 = UNLT;
9613	tcode2 = LT_EXPR;
9614	goto unordered_bcc;
9615      case UNLE_EXPR:
9616	rcode1 = UNLE;
9617	tcode2 = LE_EXPR;
9618	goto unordered_bcc;
9619      case UNGT_EXPR:
9620	rcode1 = UNGT;
9621	tcode2 = GT_EXPR;
9622	goto unordered_bcc;
9623      case UNGE_EXPR:
9624	rcode1 = UNGE;
9625	tcode2 = GE_EXPR;
9626	goto unordered_bcc;
9627      case UNEQ_EXPR:
9628	rcode1 = UNEQ;
9629	tcode2 = EQ_EXPR;
9630	goto unordered_bcc;
9631
9632      unordered_bcc:
9633        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9634	if (can_compare_p (rcode1, mode, ccp_jump))
9635	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9636			       if_true_label);
9637	else
9638	  {
9639	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
9640	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
9641	    tree cmp0, cmp1;
9642
9643	    /* If the target doesn't support combined unordered
9644	       compares, decompose into UNORDERED + comparison.  */
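	    /* Illustrative example (editorial note; the names are generic):
	       UNLT (a, b) is expanded here as UNORDERED (a, b) || a < b, and
	       the other UN* codes pair up with their ordered counterparts in
	       the same way.  */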
9645	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9646	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9647	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9648	    do_jump (exp, if_false_label, if_true_label);
9649	  }
9650      }
9651      break;
9652
9653      /* Special case:
9654		__builtin_expect (<test>, 0)	and
9655		__builtin_expect (<test>, 1)
9656
9657	 We need to do this here, so that <test> is not first converted to an
9658	 SCC operation on machines that use condition code registers and
9659	 COMPARE (such as the PowerPC), with the jump then made on whether the
9660	 SCC operation produced a 1 or a 0.  */
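      /* Hypothetical user-level example (editorial illustration only):

	     if (__builtin_expect (ptr == NULL, 0))
	       handle_rare_error ();

	 Here the test is expected to be false, and expand_builtin_expect_jump
	 uses that expectation when it emits the conditional jump sequence,
	 instead of first materializing the test as a 0/1 value.  */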
9661    case CALL_EXPR:
9662      /* Check for a built-in function.  */
9663      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9664	{
9665	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9666	  tree arglist = TREE_OPERAND (exp, 1);
9667
9668	  if (TREE_CODE (fndecl) == FUNCTION_DECL
9669	      && DECL_BUILT_IN (fndecl)
9670	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9671	      && arglist != NULL_TREE
9672	      && TREE_CHAIN (arglist) != NULL_TREE)
9673	    {
9674	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9675						    if_true_label);
9676
9677	      if (seq != NULL_RTX)
9678		{
9679		  emit_insn (seq);
9680		  return;
9681		}
9682	    }
9683	}
9684      /* Fall through and generate the normal code.  */
9685
9686    default:
9687    normal:
9688      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9689#if 0
9690      /* This is no longer needed and produces poor code, since it causes
9691	 comparisons and tests from non-SI objects to have different code
9692	 sequences.  */
9693      /* Copy to register to avoid generating bad insns by cse
9694	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
9695      if (!cse_not_expected && GET_CODE (temp) == MEM)
9696	temp = copy_to_reg (temp);
9697#endif
9698      do_pending_stack_adjust ();
9699      /* Do any postincrements in the expression that was tested.  */
9700      emit_queue ();
9701
9702      if (GET_CODE (temp) == CONST_INT
9703	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9704	  || GET_CODE (temp) == LABEL_REF)
9705	{
9706	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9707	  if (target)
9708	    emit_jump (target);
9709	}
9710      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9711	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9712	/* Note that swapping the labels gives us not-equal.  */
9713	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9714      else if (GET_MODE (temp) != VOIDmode)
9715	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9716				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9717				 GET_MODE (temp), NULL_RTX,
9718				 if_false_label, if_true_label);
9719      else
9720	abort ();
9721    }
9722
9723  if (drop_through_label)
9724    {
9725      /* If do_jump produces code that might be jumped around,
9726	 do any stack adjusts from that code, before the place
9727	 where control merges in.  */
9728      do_pending_stack_adjust ();
9729      emit_label (drop_through_label);
9730    }
9731}
9732
9733/* Given a comparison expression EXP for values too wide to be compared
9734   with one insn, test the comparison and jump to the appropriate label.
9735   The code of EXP is ignored; we always test GT if SWAP is 0,
9736   and LT if SWAP is 1.  */
9737
9738static void
9739do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9740     tree exp;
9741     int swap;
9742     rtx if_false_label, if_true_label;
9743{
9744  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9745  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9746  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9747  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9748
9749  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9750}
9751
9752/* Compare OP0 with OP1, word at a time, in mode MODE.
9753   UNSIGNEDP says to do unsigned comparison.
9754   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
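/* Rough sketch (editorial illustration; the names are invented) of the test
   emitted below for a two-word value, highest-order word first:

	if (op0_high >  op1_high) goto if_true_label;
	if (op0_high != op1_high) goto if_false_label;
	if (op0_low  >  op1_low)  goto if_true_label;
	if (op0_low  != op1_low)  goto if_false_label;
	goto if_false_label;

   Every word below the highest-order one is compared unsigned.  */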
9755
9756void
9757do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9758     enum machine_mode mode;
9759     int unsignedp;
9760     rtx op0, op1;
9761     rtx if_false_label, if_true_label;
9762{
9763  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9764  rtx drop_through_label = 0;
9765  int i;
9766
9767  if (! if_true_label || ! if_false_label)
9768    drop_through_label = gen_label_rtx ();
9769  if (! if_true_label)
9770    if_true_label = drop_through_label;
9771  if (! if_false_label)
9772    if_false_label = drop_through_label;
9773
9774  /* Compare a word at a time, high order first.  */
9775  for (i = 0; i < nwords; i++)
9776    {
9777      rtx op0_word, op1_word;
9778
9779      if (WORDS_BIG_ENDIAN)
9780	{
9781	  op0_word = operand_subword_force (op0, i, mode);
9782	  op1_word = operand_subword_force (op1, i, mode);
9783	}
9784      else
9785	{
9786	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9787	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9788	}
9789
9790      /* All but high-order word must be compared as unsigned.  */
9791      do_compare_rtx_and_jump (op0_word, op1_word, GT,
9792			       (unsignedp || i > 0), word_mode, NULL_RTX,
9793			       NULL_RTX, if_true_label);
9794
9795      /* Consider lower words only if these are equal.  */
9796      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9797			       NULL_RTX, NULL_RTX, if_false_label);
9798    }
9799
9800  if (if_false_label)
9801    emit_jump (if_false_label);
9802  if (drop_through_label)
9803    emit_label (drop_through_label);
9804}
9805
9806/* Given an EQ_EXPR expression EXP for values too wide to be compared
9807   with one insn, test the comparison and jump to the appropriate label.  */
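/* Rough sketch (editorial illustration): each pair of words is compared in
   turn,

	if (op0_word[i] != op1_word[i]) goto if_false_label;

   and only when every word has compared equal does control reach the final
   jump to IF_TRUE_LABEL (or the drop-through, when IF_TRUE_LABEL is null).  */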
9808
9809static void
9810do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9811     tree exp;
9812     rtx if_false_label, if_true_label;
9813{
9814  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9815  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9816  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9817  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9818  int i;
9819  rtx drop_through_label = 0;
9820
9821  if (! if_false_label)
9822    drop_through_label = if_false_label = gen_label_rtx ();
9823
9824  for (i = 0; i < nwords; i++)
9825    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9826			     operand_subword_force (op1, i, mode),
9827			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9828			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
9829
9830  if (if_true_label)
9831    emit_jump (if_true_label);
9832  if (drop_through_label)
9833    emit_label (drop_through_label);
9834}
9835
9836/* Jump according to whether OP0 is 0.
9837   We assume that OP0 has an integer mode that is too wide
9838   for the available compare insns.  */
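/* Rough sketch (editorial illustration) of the preferred strategy below:

	part = word[0] | word[1] | ... | word[n-1];
	if (part == 0) goto if_true_label; else goto if_false_label;

   with a fall-back to comparing each word against zero in turn if the wide
   IOR cannot be expanded.  */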
9839
9840void
9841do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9842     rtx op0;
9843     rtx if_false_label, if_true_label;
9844{
9845  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9846  rtx part;
9847  int i;
9848  rtx drop_through_label = 0;
9849
9850  /* The fastest way of doing this comparison on almost any machine is to
9851     "or" all the words and compare the result.  If all have to be loaded
9852     from memory and this is a very wide item, it's possible this may
9853     be slower, but that's highly unlikely.  */
9854
9855  part = gen_reg_rtx (word_mode);
9856  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9857  for (i = 1; i < nwords && part != 0; i++)
9858    part = expand_binop (word_mode, ior_optab, part,
9859			 operand_subword_force (op0, i, GET_MODE (op0)),
9860			 part, 1, OPTAB_WIDEN);
9861
9862  if (part != 0)
9863    {
9864      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9865			       NULL_RTX, if_false_label, if_true_label);
9866
9867      return;
9868    }
9869
9870  /* If we couldn't do the "or" simply, do this with a series of compares.  */
9871  if (! if_false_label)
9872    drop_through_label = if_false_label = gen_label_rtx ();
9873
9874  for (i = 0; i < nwords; i++)
9875    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9876			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
9877			     if_false_label, NULL_RTX);
9878
9879  if (if_true_label)
9880    emit_jump (if_true_label);
9881
9882  if (drop_through_label)
9883    emit_label (drop_through_label);
9884}
9885
9886/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9887   (including code to compute the values to be compared) and set
9888   (CC0) according to the result.
9889   The decision as to signed or unsigned comparison must be made by the caller.
9890
9891   We force a stack adjustment unless there are currently
9892   things pushed on the stack that aren't yet used.
9893
9894   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9895   compared.  */
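/* Editorial note: unless the comparison folds to a constant, the value
   returned is an rtx of the form (CODE (cc0) (const_int 0)), for example
   (gt (cc0) (const_int 0)), describing the comparison just emitted.  */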
9896
9897rtx
9898compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9899     rtx op0, op1;
9900     enum rtx_code code;
9901     int unsignedp;
9902     enum machine_mode mode;
9903     rtx size;
9904{
9905  rtx tem;
9906
9907  /* If one operand is constant, make it the second one.  Only do this
9908     if the other operand is not constant as well.  */
9909
9910  if (swap_commutative_operands_p (op0, op1))
9911    {
9912      tem = op0;
9913      op0 = op1;
9914      op1 = tem;
9915      code = swap_condition (code);
9916    }
9917
9918  if (flag_force_mem)
9919    {
9920      op0 = force_not_mem (op0);
9921      op1 = force_not_mem (op1);
9922    }
9923
9924  do_pending_stack_adjust ();
9925
9926  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9927      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9928    return tem;
9929
9930#if 0
9931  /* There's no need to do this now that combine.c can eliminate lots of
9932     sign extensions.  This can be less efficient in certain cases on other
9933     machines.  */
9934
9935  /* If this is a signed equality comparison, we can do it as an
9936     unsigned comparison since zero-extension is cheaper than sign
9937     extension and comparisons with zero are done as unsigned.  This is
9938     the case even on machines that can do fast sign extension, since
9939     zero-extension is easier to combine with other operations than
9940     sign-extension is.  If we are comparing against a constant, we must
9941     convert it to what it would look like unsigned.  */
9942  if ((code == EQ || code == NE) && ! unsignedp
9943      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9944    {
9945      if (GET_CODE (op1) == CONST_INT
9946	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9947	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9948      unsignedp = 1;
9949    }
9950#endif
9951
9952  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9953
9954  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9955}
9956
9957/* Like do_compare_and_jump but expects the values to compare as two rtx's.
9958   The decision as to signed or unsigned comparison must be made by the caller.
9959
9960   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9961   compared.  */
9962
9963void
9964do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9965			 if_false_label, if_true_label)
9966     rtx op0, op1;
9967     enum rtx_code code;
9968     int unsignedp;
9969     enum machine_mode mode;
9970     rtx size;
9971     rtx if_false_label, if_true_label;
9972{
9973  rtx tem;
9974  int dummy_true_label = 0;
9975
9976  /* Reverse the comparison if that is safe and we want to jump if it is
9977     false.  */
9978  if (! if_true_label && ! FLOAT_MODE_P (mode))
9979    {
9980      if_true_label = if_false_label;
9981      if_false_label = 0;
9982      code = reverse_condition (code);
9983    }
9984
9985  /* If one operand is constant, make it the second one.  Only do this
9986     if the other operand is not constant as well.  */
9987
9988  if (swap_commutative_operands_p (op0, op1))
9989    {
9990      tem = op0;
9991      op0 = op1;
9992      op1 = tem;
9993      code = swap_condition (code);
9994    }
9995
9996  if (flag_force_mem)
9997    {
9998      op0 = force_not_mem (op0);
9999      op1 = force_not_mem (op1);
10000    }
10001
10002  do_pending_stack_adjust ();
10003
10004  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10005      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10006    {
10007      if (tem == const_true_rtx)
10008	{
10009	  if (if_true_label)
10010	    emit_jump (if_true_label);
10011	}
10012      else
10013	{
10014	  if (if_false_label)
10015	    emit_jump (if_false_label);
10016	}
10017      return;
10018    }
10019
10020#if 0
10021  /* There's no need to do this now that combine.c can eliminate lots of
10022     sign extensions.  This can be less efficient in certain cases on other
10023     machines.  */
10024
10025  /* If this is a signed equality comparison, we can do it as an
10026     unsigned comparison since zero-extension is cheaper than sign
10027     extension and comparisons with zero are done as unsigned.  This is
10028     the case even on machines that can do fast sign extension, since
10029     zero-extension is easier to combine with other operations than
10030     sign-extension is.  If we are comparing against a constant, we must
10031     convert it to what it would look like unsigned.  */
10032  if ((code == EQ || code == NE) && ! unsignedp
10033      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10034    {
10035      if (GET_CODE (op1) == CONST_INT
10036	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10037	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10038      unsignedp = 1;
10039    }
10040#endif
10041
10042  if (! if_true_label)
10043    {
10044      dummy_true_label = 1;
10045      if_true_label = gen_label_rtx ();
10046    }
10047
10048  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10049			   if_true_label);
10050
10051  if (if_false_label)
10052    emit_jump (if_false_label);
10053  if (dummy_true_label)
10054    emit_label (if_true_label);
10055}
10056
10057/* Generate code for a comparison expression EXP (including code to compute
10058   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10059   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
10060   generated code will drop through.
10061   SIGNED_CODE should be the rtx operation to use for this comparison if
10062   the data is signed; UNSIGNED_CODE, likewise, if the data is unsigned.
10063
10064   We force a stack adjustment unless there are currently
10065   things pushed on the stack that aren't yet used.  */
10066
10067static void
10068do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10069		     if_true_label)
10070     tree exp;
10071     enum rtx_code signed_code, unsigned_code;
10072     rtx if_false_label, if_true_label;
10073{
10074  rtx op0, op1;
10075  tree type;
10076  enum machine_mode mode;
10077  int unsignedp;
10078  enum rtx_code code;
10079
10080  /* Don't crash if the comparison was erroneous.  */
10081  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10082  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10083    return;
10084
10085  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10086  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10087    return;
10088
10089  type = TREE_TYPE (TREE_OPERAND (exp, 0));
10090  mode = TYPE_MODE (type);
10091  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10092      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10093	  || (GET_MODE_BITSIZE (mode)
10094	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10095								      1)))))))
10096    {
10097      /* op0 might have been replaced by a promoted constant, in which
10098	 case the type of the second argument should be used.  */
10099      type = TREE_TYPE (TREE_OPERAND (exp, 1));
10100      mode = TYPE_MODE (type);
10101    }
10102  unsignedp = TREE_UNSIGNED (type);
10103  code = unsignedp ? unsigned_code : signed_code;
10104
10105#ifdef HAVE_canonicalize_funcptr_for_compare
10106  /* If function pointers need to be "canonicalized" before they can
10107     be reliably compared, then canonicalize them.  */
10108  if (HAVE_canonicalize_funcptr_for_compare
10109      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10110      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10111	  == FUNCTION_TYPE))
10112    {
10113      rtx new_op0 = gen_reg_rtx (mode);
10114
10115      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10116      op0 = new_op0;
10117    }
10118
10119  if (HAVE_canonicalize_funcptr_for_compare
10120      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10121      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10122	  == FUNCTION_TYPE))
10123    {
10124      rtx new_op1 = gen_reg_rtx (mode);
10125
10126      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10127      op1 = new_op1;
10128    }
10129#endif
10130
10131  /* Do any postincrements in the expression that was tested.  */
10132  emit_queue ();
10133
10134  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10135			   ((mode == BLKmode)
10136			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10137			   if_false_label, if_true_label);
10138}
10139
10140/* Generate code to calculate EXP using a store-flag instruction
10141   and return an rtx for the result.  EXP is either a comparison
10142   or a TRUTH_NOT_EXPR whose operand is a comparison.
10143
10144   If TARGET is nonzero, store the result there if convenient.
10145
10146   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10147   cheap.
10148
10149   Return zero if there is no suitable set-flag instruction
10150   available on this machine.
10151
10152   Once expand_expr has been called on the arguments of the comparison,
10153   we are committed to doing the store flag, since it is not safe to
10154   re-evaluate the expression.  We emit the store-flag insn by calling
10155   emit_store_flag, but only expand the arguments if we have a reason
10156   to believe that emit_store_flag will be successful.  If we think that
10157   it will, but it isn't, we have to simulate the store-flag with a
10158   set/jump/set sequence.  */
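/* Editorial illustration: at the source level this corresponds to expanding

	flag = (a < b);

   as a single store-flag (scc) instruction rather than as a compare, a
   conditional jump, and two moves of 0 and 1.  */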
10159
10160static rtx
10161do_store_flag (exp, target, mode, only_cheap)
10162     tree exp;
10163     rtx target;
10164     enum machine_mode mode;
10165     int only_cheap;
10166{
10167  enum rtx_code code;
10168  tree arg0, arg1, type;
10169  tree tem;
10170  enum machine_mode operand_mode;
10171  int invert = 0;
10172  int unsignedp;
10173  rtx op0, op1;
10174  enum insn_code icode;
10175  rtx subtarget = target;
10176  rtx result, label;
10177
10178  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10179     result at the end.  We can't simply invert the test since it would
10180     have already been inverted if it were valid.  This case occurs for
10181     some floating-point comparisons.  */
10182
10183  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10184    invert = 1, exp = TREE_OPERAND (exp, 0);
10185
10186  arg0 = TREE_OPERAND (exp, 0);
10187  arg1 = TREE_OPERAND (exp, 1);
10188
10189  /* Don't crash if the comparison was erroneous.  */
10190  if (arg0 == error_mark_node || arg1 == error_mark_node)
10191    return const0_rtx;
10192
10193  type = TREE_TYPE (arg0);
10194  operand_mode = TYPE_MODE (type);
10195  unsignedp = TREE_UNSIGNED (type);
10196
10197  /* We won't bother with BLKmode store-flag operations because it would mean
10198     passing a lot of information to emit_store_flag.  */
10199  if (operand_mode == BLKmode)
10200    return 0;
10201
10202  /* We won't bother with store-flag operations involving function pointers
10203     when function pointers must be canonicalized before comparisons.  */
10204#ifdef HAVE_canonicalize_funcptr_for_compare
10205  if (HAVE_canonicalize_funcptr_for_compare
10206      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10207	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10208	       == FUNCTION_TYPE))
10209	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10210	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10211		  == FUNCTION_TYPE))))
10212    return 0;
10213#endif
10214
10215  STRIP_NOPS (arg0);
10216  STRIP_NOPS (arg1);
10217
10218  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10219     operation of some type.  Some comparisons against 1 and -1 can be
10220     converted to comparisons with zero.  Do so here so that the tests
10221     below will be aware that we have a comparison with zero.   These
10222     tests will not catch constants in the first operand, but constants
10223     are rarely passed as the first operand.  */
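  /* For example (editorial illustration): signed x < 1 becomes x <= 0 and
     signed x >= 1 becomes x > 0, while signed x <= -1 becomes x < 0 and
     signed x > -1 becomes x >= 0.  The unsigned tests against 1 become LEU
     and GTU against zero instead.  */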
10224
10225  switch (TREE_CODE (exp))
10226    {
10227    case EQ_EXPR:
10228      code = EQ;
10229      break;
10230    case NE_EXPR:
10231      code = NE;
10232      break;
10233    case LT_EXPR:
10234      if (integer_onep (arg1))
10235	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10236      else
10237	code = unsignedp ? LTU : LT;
10238      break;
10239    case LE_EXPR:
10240      if (! unsignedp && integer_all_onesp (arg1))
10241	arg1 = integer_zero_node, code = LT;
10242      else
10243	code = unsignedp ? LEU : LE;
10244      break;
10245    case GT_EXPR:
10246      if (! unsignedp && integer_all_onesp (arg1))
10247	arg1 = integer_zero_node, code = GE;
10248      else
10249	code = unsignedp ? GTU : GT;
10250      break;
10251    case GE_EXPR:
10252      if (integer_onep (arg1))
10253	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10254      else
10255	code = unsignedp ? GEU : GE;
10256      break;
10257
10258    case UNORDERED_EXPR:
10259      code = UNORDERED;
10260      break;
10261    case ORDERED_EXPR:
10262      code = ORDERED;
10263      break;
10264    case UNLT_EXPR:
10265      code = UNLT;
10266      break;
10267    case UNLE_EXPR:
10268      code = UNLE;
10269      break;
10270    case UNGT_EXPR:
10271      code = UNGT;
10272      break;
10273    case UNGE_EXPR:
10274      code = UNGE;
10275      break;
10276    case UNEQ_EXPR:
10277      code = UNEQ;
10278      break;
10279
10280    default:
10281      abort ();
10282    }
10283
10284  /* Put a constant second.  */
10285  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10286    {
10287      tem = arg0; arg0 = arg1; arg1 = tem;
10288      code = swap_condition (code);
10289    }
10290
10291  /* If this is an equality or inequality test of a single bit, we can
10292     do this by shifting the bit being tested to the low-order bit and
10293     masking the result with the constant 1.  If the condition was EQ,
10294     we xor it with 1.  This does not require an scc insn and is faster
10295     than an scc insn even if we have one.  */
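  /* Worked example (editorial illustration): for (x & 0x8) != 0 the tested
     bit is bit 3, so the expansion is roughly (x >> 3) & 1; for
     (x & 0x8) == 0 it is ((x >> 3) ^ 1) & 1.  When the tested bit is the
     sign bit, the final AND with 1 is unnecessary and is omitted.  */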
10296
10297  if ((code == NE || code == EQ)
10298      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10299      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10300    {
10301      tree inner = TREE_OPERAND (arg0, 0);
10302      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10303      int ops_unsignedp;
10304
10305      /* If INNER is a right shift of a constant and it plus BITNUM does
10306	 not overflow, adjust BITNUM and INNER.  */
10307
10308      if (TREE_CODE (inner) == RSHIFT_EXPR
10309	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10310	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10311	  && bitnum < TYPE_PRECISION (type)
10312	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10313				   bitnum - TYPE_PRECISION (type)))
10314	{
10315	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10316	  inner = TREE_OPERAND (inner, 0);
10317	}
10318
10319      /* If we are going to be able to omit the AND below, we must do our
10320	 operations as unsigned.  If we must use the AND, we have a choice.
10321	 Normally unsigned is faster, but for some machines signed is.  */
10322      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10323#ifdef LOAD_EXTEND_OP
10324		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10325#else
10326		       : 1
10327#endif
10328		       );
10329
10330      if (! get_subtarget (subtarget)
10331	  || GET_MODE (subtarget) != operand_mode
10332	  || ! safe_from_p (subtarget, inner, 1))
10333	subtarget = 0;
10334
10335      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10336
10337      if (bitnum != 0)
10338	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10339			    size_int (bitnum), subtarget, ops_unsignedp);
10340
10341      if (GET_MODE (op0) != mode)
10342	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10343
10344      if ((code == EQ && ! invert) || (code == NE && invert))
10345	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10346			    ops_unsignedp, OPTAB_LIB_WIDEN);
10347
10348      /* Put the AND last so it can combine with more things.  */
10349      if (bitnum != TYPE_PRECISION (type) - 1)
10350	op0 = expand_and (mode, op0, const1_rtx, subtarget);
10351
10352      return op0;
10353    }
10354
10355  /* Now see if we are likely to be able to do this.  Return if not.  */
10356  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10357    return 0;
10358
10359  icode = setcc_gen_code[(int) code];
10360  if (icode == CODE_FOR_nothing
10361      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10362    {
10363      /* We can only do this if it is one of the special cases that
10364	 can be handled without an scc insn.  */
10365      if ((code == LT && integer_zerop (arg1))
10366	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10367	;
10368      else if (BRANCH_COST >= 0
10369	       && ! only_cheap && (code == NE || code == EQ)
10370	       && TREE_CODE (type) != REAL_TYPE
10371	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10372		    != CODE_FOR_nothing)
10373		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10374		       != CODE_FOR_nothing)))
10375	;
10376      else
10377	return 0;
10378    }
10379
10380  if (! get_subtarget (target)
10381      || GET_MODE (subtarget) != operand_mode
10382      || ! safe_from_p (subtarget, arg1, 1))
10383    subtarget = 0;
10384
10385  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10386  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10387
10388  if (target == 0)
10389    target = gen_reg_rtx (mode);
10390
10391  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10392     because, if emit_store_flag does anything, it will succeed and
10393     OP0 and OP1 will not be used subsequently.  */
10394
10395  result = emit_store_flag (target, code,
10396			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10397			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10398			    operand_mode, unsignedp, 1);
10399
10400  if (result)
10401    {
10402      if (invert)
10403	result = expand_binop (mode, xor_optab, result, const1_rtx,
10404			       result, 0, OPTAB_LIB_WIDEN);
10405      return result;
10406    }
10407
10408  /* If this failed, we have to do this with set/compare/jump/set code.  */
10409  if (GET_CODE (target) != REG
10410      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10411    target = gen_reg_rtx (GET_MODE (target));
10412
10413  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10414  result = compare_from_rtx (op0, op1, code, unsignedp,
10415			     operand_mode, NULL_RTX);
10416  if (GET_CODE (result) == CONST_INT)
10417    return (((result == const0_rtx && ! invert)
10418	     || (result != const0_rtx && invert))
10419	    ? const0_rtx : const1_rtx);
10420
10421  /* The code of RESULT may not match CODE if compare_from_rtx
10422     decided to swap its operands and reverse the original code.
10423
10424     We know that compare_from_rtx returns either a CONST_INT or
10425     a new comparison code, so it is safe to just extract the
10426     code from RESULT.  */
10427  code = GET_CODE (result);
10428
10429  label = gen_label_rtx ();
10430  if (bcc_gen_fctn[(int) code] == 0)
10431    abort ();
10432
10433  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10434  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10435  emit_label (label);
10436
10437  return target;
10438}
10439
10440
10441/* Stubs in case we haven't got a casesi insn.  */
10442#ifndef HAVE_casesi
10443# define HAVE_casesi 0
10444# define gen_casesi(a, b, c, d, e) (0)
10445# define CODE_FOR_casesi CODE_FOR_nothing
10446#endif
10447
10448/* If the machine does not have a case insn that compares the bounds,
10449   this means extra overhead for dispatch tables, which raises the
10450   threshold for using them.  */
10451#ifndef CASE_VALUES_THRESHOLD
10452#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10453#endif /* CASE_VALUES_THRESHOLD */
10454
10455unsigned int
10456case_values_threshold ()
10457{
10458  return CASE_VALUES_THRESHOLD;
10459}
10460
10461/* Attempt to generate a casesi instruction.  Returns 1 if successful,
10462   0 otherwise (i.e. if there is no casesi instruction).  */
10463int
10464try_casesi (index_type, index_expr, minval, range,
10465	    table_label, default_label)
10466     tree index_type, index_expr, minval, range;
10467     rtx table_label ATTRIBUTE_UNUSED;
10468     rtx default_label;
10469{
10470  enum machine_mode index_mode = SImode;
10471  int index_bits = GET_MODE_BITSIZE (index_mode);
10472  rtx op1, op2, index;
10473  enum machine_mode op_mode;
10474
10475  if (! HAVE_casesi)
10476    return 0;
10477
10478  /* Convert the index to SImode.  */
10479  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10480    {
10481      enum machine_mode omode = TYPE_MODE (index_type);
10482      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10483
10484      /* We must handle the endpoints in the original mode.  */
10485      index_expr = build (MINUS_EXPR, index_type,
10486			  index_expr, minval);
10487      minval = integer_zero_node;
10488      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10489      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10490			       omode, 1, default_label);
10491      /* Now we can safely truncate.  */
10492      index = convert_to_mode (index_mode, index, 0);
10493    }
10494  else
10495    {
10496      if (TYPE_MODE (index_type) != index_mode)
10497	{
10498	  index_expr = convert (type_for_size (index_bits, 0),
10499				index_expr);
10500	  index_type = TREE_TYPE (index_expr);
10501	}
10502
10503      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10504    }
10505  emit_queue ();
10506  index = protect_from_queue (index, 0);
10507  do_pending_stack_adjust ();
10508
10509  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10510  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10511      (index, op_mode))
10512    index = copy_to_mode_reg (op_mode, index);
10513
10514  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10515
10516  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10517  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10518		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10519  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10520      (op1, op_mode))
10521    op1 = copy_to_mode_reg (op_mode, op1);
10522
10523  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10524
10525  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10526  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10527		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
10528  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10529      (op2, op_mode))
10530    op2 = copy_to_mode_reg (op_mode, op2);
10531
10532  emit_jump_insn (gen_casesi (index, op1, op2,
10533			      table_label, default_label));
10534  return 1;
10535}
10536
10537/* Attempt to generate a tablejump instruction; same concept.  */
10538#ifndef HAVE_tablejump
10539#define HAVE_tablejump 0
10540#define gen_tablejump(x, y) (0)
10541#endif
10542
10543/* Subroutine of the next function.
10544
10545   INDEX is the value being switched on, with the lowest value
10546   in the table already subtracted.
10547   MODE is its expected mode (needed if INDEX is constant).
10548   RANGE is the length of the jump table.
10549   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10550
10551   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10552   index value is out of range.  */
10553
10554static void
10555do_tablejump (index, mode, range, table_label, default_label)
10556     rtx index, range, table_label, default_label;
10557     enum machine_mode mode;
10558{
10559  rtx temp, vector;
10560
10561  /* Do an unsigned comparison (in the proper mode) between the index
10562     expression and the value which represents the length of the range.
10563     Since we just finished subtracting the lower bound of the range
10564     from the index expression, this comparison allows us to simultaneously
10565     check that the original index expression value is both greater than
10566     or equal to the minimum value of the range and less than or equal to
10567     the maximum value of the range.  */
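  /* Worked example (editorial illustration): for case values 3 .. 10 the
     caller passes INDEX = original_index - 3 and RANGE = 7.  An original
     index of 2 yields (unsigned) -1 and an index of 11 yields 8; both
     exceed 7 in the unsigned comparison, so both reach DEFAULT_LABEL with
     a single compare-and-branch.  */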
10568
10569  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10570			   default_label);
10571
10572  /* If index is in range, it must fit in Pmode.
10573     Convert to Pmode so we can index with it.  */
10574  if (mode != Pmode)
10575    index = convert_to_mode (Pmode, index, 1);
10576
10577  /* Don't let a MEM slip through, because then the INDEX that comes
10578     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10579     and break_out_memory_refs will go to work on it and mess it up.  */
10580#ifdef PIC_CASE_VECTOR_ADDRESS
10581  if (flag_pic && GET_CODE (index) != REG)
10582    index = copy_to_mode_reg (Pmode, index);
10583#endif
10584
10585  /* If flag_force_addr were to affect this address
10586     it could interfere with the tricky assumptions made
10587     about addresses that contain label-refs,
10588     which may be valid only very near the tablejump itself.  */
10589  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10590     GET_MODE_SIZE, because this indicates how large insns are.  The other
10591     uses should all be Pmode, because they are addresses.  This code
10592     could fail if addresses and insns are not the same size.  */
10593  index = gen_rtx_PLUS (Pmode,
10594			gen_rtx_MULT (Pmode, index,
10595				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10596			gen_rtx_LABEL_REF (Pmode, table_label));
10597#ifdef PIC_CASE_VECTOR_ADDRESS
10598  if (flag_pic)
10599    index = PIC_CASE_VECTOR_ADDRESS (index);
10600  else
10601#endif
10602    index = memory_address_noforce (CASE_VECTOR_MODE, index);
10603  temp = gen_reg_rtx (CASE_VECTOR_MODE);
10604  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10605  RTX_UNCHANGING_P (vector) = 1;
10606  convert_move (temp, vector, 0);
10607
10608  emit_jump_insn (gen_tablejump (temp, table_label));
10609
10610  /* If we are generating PIC code or if the table is PC-relative, the
10611     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10612  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10613    emit_barrier ();
10614}
10615
10616int
10617try_tablejump (index_type, index_expr, minval, range,
10618	       table_label, default_label)
10619     tree index_type, index_expr, minval, range;
10620     rtx table_label, default_label;
10621{
10622  rtx index;
10623
10624  if (! HAVE_tablejump)
10625    return 0;
10626
10627  index_expr = fold (build (MINUS_EXPR, index_type,
10628			    convert (index_type, index_expr),
10629			    convert (index_type, minval)));
10630  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10631  emit_queue ();
10632  index = protect_from_queue (index, 0);
10633  do_pending_stack_adjust ();
10634
10635  do_tablejump (index, TYPE_MODE (index_type),
10636		convert_modes (TYPE_MODE (index_type),
10637			       TYPE_MODE (TREE_TYPE (range)),
10638			       expand_expr (range, NULL_RTX,
10639					    VOIDmode, 0),
10640			       TREE_UNSIGNED (TREE_TYPE (range))),
10641		table_label, default_label);
10642  return 1;
10643}
10644